diff --git a/config_example.py b/config_example.py index 443f5acd..ef7578d9 100644 --- a/config_example.py +++ b/config_example.py @@ -9,6 +9,6 @@ STATIC_ROOT = 'collected-static' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': 'passerelle.sqlite3', + 'NAME': 'passerelle.sqlite3', } } diff --git a/debian/settings.py b/debian/settings.py index 0455cfd0..832fd910 100644 --- a/debian/settings.py +++ b/debian/settings.py @@ -12,12 +12,12 @@ # This file is sourced by "execfile" from /usr/lib/passerelle/debian_config.py # SECURITY WARNING: don't run with debug turned on in production! -#DEBUG = False +# DEBUG = False -#ADMINS = ( +# ADMINS = ( # ('User 1', 'poulpe@example.org'), # ('User 2', 'janitor@example.net'), -#) +# ) # ALLOWED_HOSTS must be correct in production! # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts @@ -29,26 +29,26 @@ ALLOWED_HOSTS = ['*'] # Database # Warning: don't change ENGINE, it must be 'tenant_schemas.postgresql_backend' -#DATABASES['default']['NAME'] = 'passerelle' -#DATABASES['default']['USER'] = 'passerelle' -#DATABASES['default']['PASSWORD'] = '******' -#DATABASES['default']['HOST'] = 'localhost' -#DATABASES['default']['PORT'] = '5432' +# DATABASES['default']['NAME'] = 'passerelle' +# DATABASES['default']['USER'] = 'passerelle' +# DATABASES['default']['PASSWORD'] = '******' +# DATABASES['default']['HOST'] = 'localhost' +# DATABASES['default']['PORT'] = '5432' LANGUAGE_CODE = 'fr-fr' TIME_ZONE = 'Europe/Paris' # Email configuration -#EMAIL_SUBJECT_PREFIX = '[passerelle] ' -#SERVER_EMAIL = 'root@passerelle.example.org' -#DEFAULT_FROM_EMAIL = 'webmaster@passerelle.example.org' +# EMAIL_SUBJECT_PREFIX = '[passerelle] ' +# SERVER_EMAIL = 'root@passerelle.example.org' +# DEFAULT_FROM_EMAIL = 'webmaster@passerelle.example.org' # SMTP configuration -#EMAIL_HOST = 'localhost' -#EMAIL_HOST_USER = '' -#EMAIL_HOST_PASSWORD = '' -#EMAIL_PORT = 25 +# EMAIL_HOST = 'localhost' +# 
EMAIL_HOST_USER = '' +# EMAIL_HOST_PASSWORD = '' +# EMAIL_PORT = 25 # HTTPS -#CSRF_COOKIE_SECURE = True -#SESSION_COOKIE_SECURE = True +# CSRF_COOKIE_SECURE = True +# SESSION_COOKIE_SECURE = True diff --git a/functests/cmis/conftest.py b/functests/cmis/conftest.py index a72fc746..5eac0663 100644 --- a/functests/cmis/conftest.py +++ b/functests/cmis/conftest.py @@ -9,15 +9,16 @@ def pytest_addoption(parser): parser.addoption("--cmis-endpoint", help="Url of a passerelle CMIS endpoint") parser.addoption("--cmis-username", help="Username for the CMIS endpoint") parser.addoption("--cmis-password", help="Password for the CMIS endpoint") - parser.addoption( - "--preserve-tree", action="store_true", default=False, help="Preserve test directory") + parser.addoption("--preserve-tree", action="store_true", default=False, help="Preserve test directory") @pytest.fixture(scope='session') def cmisclient(request): return cmislib.CmisClient( - request.config.getoption("--cmis-endpoint"), request.config.getoption("--cmis-username"), - request.config.getoption("--cmis-password")) + request.config.getoption("--cmis-endpoint"), + request.config.getoption("--cmis-username"), + request.config.getoption("--cmis-password"), + ) @pytest.fixture(scope='session') diff --git a/functests/cmis/tests_cmis.py b/functests/cmis/tests_cmis.py index 715ce1fa..63178e41 100644 --- a/functests/cmis/tests_cmis.py +++ b/functests/cmis/tests_cmis.py @@ -10,11 +10,16 @@ import requests SPECIAL_CHARS = '!#$%&+-^_`;[]{}+=' -@pytest.mark.parametrize("path,file_name", [ - ('', 'some.file'), ('/toto', 'some.file'), ('/toto/tata', 'some.file'), - ('/toto', 'some.other'), - ('/%s' % SPECIAL_CHARS, '%(spe)s.%(spe)s' % {'spe': SPECIAL_CHARS}) -]) +@pytest.mark.parametrize( + "path,file_name", + [ + ('', 'some.file'), + ('/toto', 'some.file'), + ('/toto/tata', 'some.file'), + ('/toto', 'some.other'), + ('/%s' % SPECIAL_CHARS, '%(spe)s.%(spe)s' % {'spe': SPECIAL_CHARS}), + ], +) def test_uploadfile(cmisclient, 
cmis_connector, cmis_tmpdir, tmpdir, monkeypatch, path, file_name): result_filename = 'result.file' monkeypatch.chdir(tmpdir) @@ -25,9 +30,12 @@ def test_uploadfile(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, monkeypatch with orig_file.open('rb') as f: file_b64_content = base64.b64encode(f.read()) response = requests.post( - url, json={"path": cmis_tmpdir + path, - "file": {"content": file_b64_content, "filename": file_name, - "content_type": "image/jpeg"}}) + url, + json={ + "path": cmis_tmpdir + path, + "file": {"content": file_b64_content, "filename": file_name, "content_type": "image/jpeg"}, + }, + ) assert response.status_code == 200 resp_data = response.json() assert resp_data['err'] == 0 @@ -50,17 +58,23 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo url = urlparse.urljoin(cmis_connector, 'uploadfile') file_b64_content = base64.b64encode('file_content') response = requests.post( - url, json={"path": cmis_tmpdir + '/uploadconflict', - "file": {"content": file_b64_content, "filename": 'some.file', - "content_type": "image/jpeg"}}) + url, + json={ + "path": cmis_tmpdir + '/uploadconflict', + "file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"}, + }, + ) assert response.status_code == 200 resp_data = response.json() assert resp_data['err'] == 0 file_b64_content = base64.b64encode('other_file_content') response = requests.post( - url, json={"path": cmis_tmpdir + '/uploadconflict', - "file": {"content": file_b64_content, "filename": 'some.file', - "content_type": "image/jpeg"}}) + url, + json={ + "path": cmis_tmpdir + '/uploadconflict', + "file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"}, + }, + ) assert response.status_code == 200 resp_data = response.json() assert resp_data['err'] == 1 diff --git a/functests/planitech/conftest.py b/functests/planitech/conftest.py index 314ff5e1..6777d533 100644 --- a/functests/planitech/conftest.py +++ 
b/functests/planitech/conftest.py @@ -2,8 +2,7 @@ import pytest def pytest_addoption(parser): - parser.addoption( - "--url", help="Url of a passerelle Planitech connector instance") + parser.addoption("--url", help="Url of a passerelle Planitech connector instance") @pytest.fixture(scope='session') diff --git a/functests/planitech/test_planitech.py b/functests/planitech/test_planitech.py index 2b098fa1..4f6baa4d 100644 --- a/functests/planitech/test_planitech.py +++ b/functests/planitech/test_planitech.py @@ -7,10 +7,9 @@ import requests def test_main(conn): # get days - query_string = parse.urlencode({ - 'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00', - 'display': 'date' - }) + query_string = parse.urlencode( + {'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00', 'display': 'date'} + ) url = conn + '/getfreegaps?%s' % query_string resp = requests.get(url) resp.raise_for_status() @@ -20,10 +19,9 @@ def test_main(conn): assert data # get places - query_string = parse.urlencode({ - 'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00', - 'display': 'place' - }) + query_string = parse.urlencode( + {'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00', 'display': 'place'} + ) url = conn + '/getfreegaps?%s' % query_string resp = requests.get(url) resp.raise_for_status() @@ -34,10 +32,16 @@ def test_main(conn): place = data[random.randint(0, len(data) - 1)]['id'] # get days on one place - query_string = parse.urlencode({ - 'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00', - 'place_id': place, 'display': 'date' - }) + query_string = parse.urlencode( + { + 'start_days': 1, + 'end_days': 90, + 'start_time': '10:00', + 'end_time': '11:00', + 'place_id': place, + 'display': 'date', + } + ) url = conn + '/getfreegaps?%s' % query_string resp = requests.get(url) resp.raise_for_status() @@ -55,10 +59,19 @@ def test_main(conn): chosen_date = data[0]['id'] # 
create reservation params = { - 'date': chosen_date, 'start_time': '10:00', 'end_time': '11:00', - 'place_id': place, 'price': 200, 'name_id': 'john-doe', 'type_id': resa_type_id, - 'first_name': 'jon', 'last_name': 'doe', 'activity_id': activity_id, - 'email': 'jon.doe@localhost', 'object': 'reservation object', 'vat_rate': 200 + 'date': chosen_date, + 'start_time': '10:00', + 'end_time': '11:00', + 'place_id': place, + 'price': 200, + 'name_id': 'john-doe', + 'type_id': resa_type_id, + 'first_name': 'jon', + 'last_name': 'doe', + 'activity_id': activity_id, + 'email': 'jon.doe@localhost', + 'object': 'reservation object', + 'vat_rate': 200, } print('Create reservation parameters \n') pprint.pprint(params) @@ -76,9 +89,7 @@ def test_main(conn): reservation_id = data['reservation_id'] # confirm reservation - params = { - 'reservation_id': reservation_id, 'status': 'standard' - } + params = {'reservation_id': reservation_id, 'status': 'standard'} url = conn + '/updatereservation' resp = requests.post(url, json=params) resp.raise_for_status() diff --git a/functests/toulouse_axel/conftest.py b/functests/toulouse_axel/conftest.py index f3a3e5f8..404e867d 100644 --- a/functests/toulouse_axel/conftest.py +++ b/functests/toulouse_axel/conftest.py @@ -2,8 +2,7 @@ import pytest def pytest_addoption(parser): - parser.addoption( - "--url", help="Url of a passerelle Toulouse Axel connector instance") + parser.addoption("--url", help="Url of a passerelle Toulouse Axel connector instance") parser.addoption("--nameid", help="Publik Name ID") parser.addoption("--firstname", help="first name of a user") parser.addoption("--lastname", help="Last name of a user") diff --git a/functests/toulouse_axel/test_toulouse_axel.py b/functests/toulouse_axel/test_toulouse_axel.py index 2da264a8..427eb16d 100644 --- a/functests/toulouse_axel/test_toulouse_axel.py +++ b/functests/toulouse_axel/test_toulouse_axel.py @@ -45,19 +45,58 @@ def test_link(conn, user): payload['DROITALIMAGE'] = 'NON' 
payload['REVENUS']['CHOIXREVENU'] = '' # remove non editable fields - for key in ['SITUATIONFAMILIALE', 'SITUATIONFAMILIALE_label', 'NBENFANTACTIF', 'NBRLACTIF', 'IDDUI', 'CODEMISEAJOUR', - 'management_dates', 'annee_reference', 'annee_reference_label', 'annee_reference_short']: + for key in [ + 'SITUATIONFAMILIALE', + 'SITUATIONFAMILIALE_label', + 'NBENFANTACTIF', + 'NBRLACTIF', + 'IDDUI', + 'CODEMISEAJOUR', + 'management_dates', + 'annee_reference', + 'annee_reference_label', + 'annee_reference_short', + ]: payload.pop(key) - for key in ['IDPERSONNE', 'NOM', 'PRENOM', 'NOMJEUNEFILLE', 'DATENAISSANCE', 'CIVILITE', 'INDICATEURRL', 'CSP_label']: + for key in [ + 'IDPERSONNE', + 'NOM', + 'PRENOM', + 'NOMJEUNEFILLE', + 'DATENAISSANCE', + 'CIVILITE', + 'INDICATEURRL', + 'CSP_label', + ]: if 'RL1' in payload: payload['RL1'].pop(key) if 'RL2' in payload: payload['RL2'].pop(key) - for key in ['MONTANTTOTAL', 'DATEVALIDITE', 'SFI', 'IREVENUS', 'RNF', 'NBENFANTSACHARGE', 'TYPEREGIME_label']: + for key in [ + 'MONTANTTOTAL', + 'DATEVALIDITE', + 'SFI', + 'IREVENUS', + 'RNF', + 'NBENFANTSACHARGE', + 'TYPEREGIME_label', + ]: payload['REVENUS'].pop(key, None) for enfant in payload['ENFANT']: - for key in ['id', 'text', 'NOM', 'DATENAISSANCE', 'SEXE', 'PRENOMPERE', 'PRENOMMERE', 'NOMPERE', 'NOMMERE', 'RATTACHEAUTREDUI', 'PRENOM', - 'clae_cantine_current']: + for key in [ + 'id', + 'text', + 'NOM', + 'DATENAISSANCE', + 'SEXE', + 'PRENOMPERE', + 'PRENOMMERE', + 'NOMPERE', + 'NOMMERE', + 'RATTACHEAUTREDUI', + 'PRENOM', + 'clae_cantine_current', + ]: enfant.pop(key) enfant['AUTORISATIONURGENCEMEDICALE'] = 'OUI' # manage contact fields @@ -98,8 +137,10 @@ def test_link(conn, user): # add partial update flags flags = [ 'maj:adresse', - 'maj:rl1', 'maj:rl1_adresse_employeur', - 'maj:rl2', 'maj:rl2_adresse_employeur', + 'maj:rl1', + 'maj:rl1_adresse_employeur', + 'maj:rl2', + 'maj:rl2_adresse_employeur', 'maj:revenus', ] for i in range(0, 6): diff --git a/functests/vivaticket/conftest.py 
b/functests/vivaticket/conftest.py index bf260bed..d50b657f 100644 --- a/functests/vivaticket/conftest.py +++ b/functests/vivaticket/conftest.py @@ -2,8 +2,7 @@ import pytest def pytest_addoption(parser): - parser.addoption( - "--url", help="Url of a passerelle Vivaticket connector instance") + parser.addoption("--url", help="Url of a passerelle Vivaticket connector instance") @pytest.fixture(scope='session') diff --git a/functests/vivaticket/test_vivaticket.py b/functests/vivaticket/test_vivaticket.py index 6a542424..f61979fd 100644 --- a/functests/vivaticket/test_vivaticket.py +++ b/functests/vivaticket/test_vivaticket.py @@ -4,6 +4,7 @@ import datetime import requests import random + def call_generic(conn, endpoint): print("%s \n" % endpoint) url = conn + '/%s' % endpoint @@ -21,17 +22,24 @@ def call_generic(conn, endpoint): def test_get_events(conn): call_generic(conn, 'events') + def test_get_rooms(conn): call_generic(conn, 'rooms') + def test_get_themes(conn): call_generic(conn, 'themes') + def test_book_event(conn): url = conn + '/book' - payload = {'id': 'formid', 'email': 'foo@example.com', - 'datetime': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M'), - 'room': '001', 'theme': 'A0001', 'quantity': 1 + payload = { + 'id': 'formid', + 'email': 'foo@example.com', + 'datetime': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M'), + 'room': '001', + 'theme': 'A0001', + 'quantity': 1, } events = call_generic(conn, 'events') random.shuffle(events) @@ -42,7 +50,7 @@ def test_book_event(conn): themes = call_generic(conn, 'themes') random.shuffle(themes) payload['theme'] = themes[0]['id'] - print("Creating booking with the following payload:\n%s" % payload) + print("Creating booking with the following payload:\n%s" % payload) resp = requests.post(url, json=payload) resp.raise_for_status() res = resp.json() diff --git a/passerelle/apps/actesweb/migrations/0001_initial.py b/passerelle/apps/actesweb/migrations/0001_initial.py index 5d9c0b23..7419c42b 100644 --- 
a/passerelle/apps/actesweb/migrations/0001_initial.py +++ b/passerelle/apps/actesweb/migrations/0001_initial.py @@ -14,12 +14,38 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ActesWeb', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_actesweb_users_+', related_query_name='+', blank=True)), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_actesweb_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': "ActesWeb - Demande d'acte d'\xe9tat civil", diff --git a/passerelle/apps/actesweb/models.py b/passerelle/apps/actesweb/models.py index 828f1b57..dc7b383a 100644 --- a/passerelle/apps/actesweb/models.py +++ b/passerelle/apps/actesweb/models.py @@ -32,6 +32,8 @@ from passerelle.compat import json_loads from passerelle.utils.api import endpoint from passerelle.utils.jsonresponse import APIError from passerelle.utils.conversion import ensure_encoding + + @contextlib.contextmanager def named_tempfile(*args, **kwargs): with 
tempfile.NamedTemporaryFile(*args, **kwargs) as fp: @@ -46,8 +48,7 @@ class ActesWeb(BaseResource): @property def basepath(self): - return os.path.join( - default_storage.path('actesweb'), self.slug) + return os.path.join(default_storage.path('actesweb'), self.slug) @endpoint(perm='can_access', methods=['post'], description=_('Create demand')) def create(self, request, *args, **kwargs): @@ -88,6 +89,6 @@ class ActesWeb(BaseResource): tempfile_name = tpf.name os.rename(tempfile_name, filepath) # set read only permission for owner and group - os.chmod(filepath, stat.S_IRUSR|stat.S_IRGRP|stat.S_IWGRP) + os.chmod(filepath, stat.S_IRUSR | stat.S_IRGRP | stat.S_IWGRP) demand_id = '%s_%s' % (application_id, os.path.basename(filepath)) return {'data': {'demand_id': demand_id}} diff --git a/passerelle/apps/airquality/migrations/0001_initial.py b/passerelle/apps/airquality/migrations/0001_initial.py index aeabcabb..7ce18b7a 100644 --- a/passerelle/apps/airquality/migrations/0001_initial.py +++ b/passerelle/apps/airquality/migrations/0001_initial.py @@ -14,12 +14,38 @@ class Migration(migrations.Migration): migrations.CreateModel( name='AirQuality', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_airquality_users_+', related_query_name='+', blank=True)), + ( + 'log_level', + models.CharField( + 
default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_airquality_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Air Quality', diff --git a/passerelle/apps/airquality/models.py b/passerelle/apps/airquality/models.py index ff03ffa1..6371a7e2 100644 --- a/passerelle/apps/airquality/models.py +++ b/passerelle/apps/airquality/models.py @@ -28,24 +28,28 @@ from passerelle.utils.jsonresponse import APIError class AirQuality(BaseResource): category = _('Misc') - api_description = _(u''' + api_description = _( + u''' This API provides a unique format for the air quality data of various places. (But only supports the Rhône-Alpes region for now). - ''') + ''' + ) - atmo_aura_api_token = models.CharField(max_length=100, - verbose_name=_('ATMO AURA API token'), - blank=True, null=True) + atmo_aura_api_token = models.CharField( + max_length=100, verbose_name=_('ATMO AURA API token'), blank=True, null=True + ) class Meta: verbose_name = _('Air Quality') - @endpoint(pattern='^(?P\w+)/(?P\w+)/$', - example_pattern='{country}/{city}/', - parameters={ - 'country': {'description': _('Country Code'), 'example_value': 'fr'}, - 'city': {'description': _('City Name'), 'example_value': 'lyon'}, - }) + @endpoint( + pattern='^(?P\w+)/(?P\w+)/$', + example_pattern='{country}/{city}/', + parameters={ + 'country': {'description': _('Country Code'), 'example_value': 'fr'}, + 'city': {'description': _('City Name'), 'example_value': 'lyon'}, + }, + ) def details(self, request, country, city, **kwargs): methods = { ('fr', 'albertville'): 'air_rhonealpes', @@ -82,7 +86,8 @@ class AirQuality(BaseResource): 'vienne': '38544', } insee_code = insee_codes.get(city.lower()) - response = 
self.requests.get('https://api.atmo-aura.fr/communes/%s/indices' % insee_code, + response = self.requests.get( + 'https://api.atmo-aura.fr/communes/%s/indices' % insee_code, params={'api_token': self.atmo_aura_api_token}, ) json_response = response.json() @@ -106,12 +111,13 @@ class AirQuality(BaseResource): break if 'latest' in response_data: - comment_response = self.requests.get('https://api.atmo-aura.fr/commentaire', - params={ - 'date': response_data['latest']['date'], - 'api_token': self.atmo_aura_api_token, - } - ) + comment_response = self.requests.get( + 'https://api.atmo-aura.fr/commentaire', + params={ + 'date': response_data['latest']['date'], + 'api_token': self.atmo_aura_api_token, + }, + ) if comment_response.ok: response_data['comment'] = comment_response.json().get('commentaire') diff --git a/passerelle/apps/api_entreprise/migrations/0001_initial.py b/passerelle/apps/api_entreprise/migrations/0001_initial.py index f1cdc0ca..bf93acf7 100644 --- a/passerelle/apps/api_entreprise/migrations/0001_initial.py +++ b/passerelle/apps/api_entreprise/migrations/0001_initial.py @@ -17,13 +17,29 @@ class Migration(migrations.Migration): migrations.CreateModel( name='APIEntreprise', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('url', models.URLField(default=b'https://entreprise.api.gouv.fr/v2/', max_length=256, verbose_name='API URL')), + ( + 'url', + models.URLField( + default=b'https://entreprise.api.gouv.fr/v2/', max_length=256, verbose_name='API URL' + ), + ), ('token', models.CharField(max_length=1024, verbose_name='API token')), - ('users', models.ManyToManyField(blank=True, 
related_name='_apientreprise_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_apientreprise_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'API Entreprise', diff --git a/passerelle/apps/api_entreprise/migrations/0002_auto_20190701_1357.py b/passerelle/apps/api_entreprise/migrations/0002_auto_20190701_1357.py index b0dda771..b8bf2d67 100644 --- a/passerelle/apps/api_entreprise/migrations/0002_auto_20190701_1357.py +++ b/passerelle/apps/api_entreprise/migrations/0002_auto_20190701_1357.py @@ -15,8 +15,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='apientreprise', name='recipient', - field=models.CharField(default='', max_length=1024, verbose_name='Recipient', - help_text='default value' + field=models.CharField( + default='', max_length=1024, verbose_name='Recipient', help_text='default value' ), preserve_default=False, ), diff --git a/passerelle/apps/api_entreprise/models.py b/passerelle/apps/api_entreprise/models.py index e04ce25a..be6c8723 100644 --- a/passerelle/apps/api_entreprise/models.py +++ b/passerelle/apps/api_entreprise/models.py @@ -69,7 +69,7 @@ def normalize_results(data): if tstamp > 0: try: aware_date = make_aware(datetime.fromtimestamp(int(data[key]))) - timestamp_to_datetime[key[:-len('timestamp')] + 'datetime'] = aware_date + timestamp_to_datetime[key[: -len('timestamp')] + 'datetime'] = aware_date except (ValueError, TypeError): pass # add converted timestamps to initial data @@ -81,8 +81,9 @@ class APIEntreprise(BaseResource): url = models.URLField(_('API URL'), max_length=256, default='https://entreprise.api.gouv.fr/v2/') token = models.CharField(max_length=1024, verbose_name=_('API token')) - recipient = models.CharField(max_length=1024, verbose_name=_('Recipient'), blank=False, - help_text=_('default value')) + recipient = models.CharField( + max_length=1024, verbose_name=_('Recipient'), 
blank=False, help_text=_('default value') + ) category = _('Business Process Connectors') @@ -102,20 +103,20 @@ class APIEntreprise(BaseResource): try: response = self.requests.get(url, data=params, cache_duration=300) except requests.RequestException as e: - raise APIError(u'API-entreprise connection error: %s' % - exception_to_text(e), data=[]) + raise APIError(u'API-entreprise connection error: %s' % exception_to_text(e), data=[]) try: data = response.json() except ValueError as e: content = response.text[:1000] raise APIError( - u'API-entreprise returned non-JSON content with status %s: %s' % - (response.status_code, content), + u'API-entreprise returned non-JSON content with status %s: %s' + % (response.status_code, content), data={ 'status_code': response.status_code, 'exception': exception_to_text(e), 'content': content, - }) + }, + ) if response.status_code != 200: if data.get('error') == 'not_found': return { @@ -123,12 +124,12 @@ class APIEntreprise(BaseResource): 'err_desc': data.get('message', 'not-found'), } raise APIError( - u'API-entreprise returned a non 200 status %s: %s' % - (response.status_code, data), + u'API-entreprise returned a non 200 status %s: %s' % (response.status_code, data), data={ 'status_code': response.status_code, 'content': data, - }) + }, + ) normalize_results(data) return { 'err': 0, @@ -138,13 +139,10 @@ class APIEntreprise(BaseResource): # description of common endpoint parameters ASSOCIATION_PARAM = { 'description': _('association SIREN or RNA/WALDEC number'), - 'example_value': '44317013900036' + 'example_value': '44317013900036', } - CONTEXT_PARAM = { - 'description': _('request context: MPS, APS...'), - 'example_value': 'APS' - } + CONTEXT_PARAM = {'description': _('request context: MPS, APS...'), 'example_value': 'APS'} MONTH_PARAM = { 'description': _('requested month'), @@ -153,12 +151,12 @@ class APIEntreprise(BaseResource): OBJECT_PARAM = { 'description': _('request object: form number, file identifier...'), - 
'example_value': '42' + 'example_value': '42', } RECIPIENT_PARAM = { 'description': _('request recipient: usually customer number'), - 'example_value': '44317013900036' + 'example_value': '44317013900036', } SIREN_PARAM = { @@ -166,26 +164,25 @@ class APIEntreprise(BaseResource): 'example_value': '443170139', } - SIRET_PARAM = { - 'description': _('firms SIRET number'), - 'example_value': '44317013900036' - } + SIRET_PARAM = {'description': _('firms SIRET number'), 'example_value': '44317013900036'} YEAR_PARAM = { 'description': _('requested year'), 'example_value': '2019', } - @endpoint(perm='can_access', - pattern=r'(?P\w+)/$', - example_pattern='{association_id}/', - description=_('Get association\'s documents'), - parameters={ - 'association_id': ASSOCIATION_PARAM, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM - }) + @endpoint( + perm='can_access', + pattern=r'(?P\w+)/$', + example_pattern='{association_id}/', + description=_('Get association\'s documents'), + parameters={ + 'association_id': ASSOCIATION_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def documents_associations(self, request, association_id, **kwargs): data = [] resp = self.get('documents_associations/%s/' % association_id, **kwargs) @@ -193,19 +190,24 @@ class APIEntreprise(BaseResource): # ignore documents with no type if not item.get('type'): continue - signature_elements = {'url': item['url'], - 'context': kwargs['context'], - 'object': kwargs['object'], - 'recipient': kwargs['recipient']} + signature_elements = { + 'url': item['url'], + 'context': kwargs['context'], + 'object': kwargs['object'], + 'recipient': kwargs['recipient'], + } signature = signing.dumps(signature_elements) document_url = request.build_absolute_uri( - reverse('generic-endpoint', - kwargs={ - 'connector': self.get_connector_slug(), - 'slug': self.slug, - 'endpoint': 'document', - 'rest': '%s/%s/' % (association_id, signature), 
- })) + reverse( + 'generic-endpoint', + kwargs={ + 'connector': self.get_connector_slug(), + 'slug': self.slug, + 'endpoint': 'document', + 'rest': '%s/%s/' % (association_id, signature), + }, + ) + ) item['id'] = item['timestamp'] item['text'] = item['type'] item['url'] = document_url @@ -214,19 +216,21 @@ class APIEntreprise(BaseResource): data.sort(key=lambda i: i['id']) return {'err': 0, 'data': data} - @endpoint(pattern=r'(?P\w+)/(?P[\:\w-]+)/$', - example_pattern='{association_id}/{document_id}/', - description=_('Get association\'s document'), - parameters={ - 'association_id': ASSOCIATION_PARAM, - 'document_id': { - 'description': _('document id'), - 'example_value': 'A1500660325', - }, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + pattern=r'(?P\w+)/(?P[\:\w-]+)/$', + example_pattern='{association_id}/{document_id}/', + description=_('Get association\'s document'), + parameters={ + 'association_id': ASSOCIATION_PARAM, + 'document_id': { + 'description': _('document id'), + 'example_value': 'A1500660325', + }, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def document(self, request, association_id, document_id, **kwargs): try: params = signing.loads(document_id, max_age=DOCUMENT_SIGNATURE_MAX_AGE) @@ -237,20 +241,22 @@ class APIEntreprise(BaseResource): return HttpResponse(response, content_type='application/pdf') raise Http404('document not found') - @endpoint(name='document_association', - pattern=r'(?P\w+)/get-last/$', - example_pattern='{association_id}/get-last/', - description=_('Get association\'s last document of type'), - parameters={ - 'association_id': ASSOCIATION_PARAM, - 'document_type': { - 'description': _('document type'), - 'example_value': 'Statuts', - }, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + name='document_association', + pattern=r'(?P\w+)/get-last/$', + 
example_pattern='{association_id}/get-last/', + description=_('Get association\'s last document of type'), + parameters={ + 'association_id': ASSOCIATION_PARAM, + 'document_type': { + 'description': _('document type'), + 'example_value': 'Statuts', + }, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def get_last_document_of_type(self, request, association_id, document_type, **kwargs): document = None resp = self.get('documents_associations/%s/' % association_id, **kwargs) @@ -260,46 +266,49 @@ class APIEntreprise(BaseResource): document = documents[-1] return {'data': document} - @endpoint(perm='can_access', - pattern=r'(?P\w+)/$', - example_pattern='{siren}/', - description=_('Get firm\'s data from Infogreffe'), - parameters={ - 'siren': SIREN_PARAM, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + perm='can_access', + pattern=r'(?P\w+)/$', + example_pattern='{siren}/', + description=_('Get firm\'s data from Infogreffe'), + parameters={ + 'siren': SIREN_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def extraits_rcs(self, request, siren, **kwargs): return self.get('extraits_rcs_infogreffe/%s/' % siren, **kwargs) - @endpoint(perm='can_access', - pattern=r'(?P\w+)/$', - example_pattern='{association_id}/', - description=_('Get association\'s related informations'), - parameters={ - 'association_id': ASSOCIATION_PARAM, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + perm='can_access', + pattern=r'(?P\w+)/$', + example_pattern='{association_id}/', + description=_('Get association\'s related informations'), + parameters={ + 'association_id': ASSOCIATION_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def associations(self, request, association_id, **kwargs): return self.get('associations/%s/' % 
association_id, **kwargs) - @endpoint(perm='can_access', - pattern=r'(?P\w+)/$', - example_pattern='{siren}/', - description=_('Get firm\'s related informations'), - parameters={ - 'siren': SIREN_PARAM, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - 'include_private': { - 'description': _('Include private informations'), - 'example_value': 'true' - } - }) + @endpoint( + perm='can_access', + pattern=r'(?P\w+)/$', + example_pattern='{siren}/', + description=_('Get firm\'s related informations'), + parameters={ + 'siren': SIREN_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + 'include_private': {'description': _('Include private informations'), 'example_value': 'true'}, + }, + ) def entreprises(self, request, siren, include_private=False, **kwargs): if len(siren) != 9: raise APIError(_('invalid SIREN length (must be 9 characters)')) @@ -307,60 +316,68 @@ class APIEntreprise(BaseResource): kwargs['non_diffusables'] = True return self.get('entreprises/%s/' % siren, **kwargs) - @endpoint(perm='can_access', - methods=['get'], - pattern=r'(?P\w+)/$', - example_pattern='{siret}/', - description_get=_('Get firms\'s related informations'), - parameters={ - 'siret': SIRET_PARAM, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + perm='can_access', + methods=['get'], + pattern=r'(?P\w+)/$', + example_pattern='{siret}/', + description_get=_('Get firms\'s related informations'), + parameters={ + 'siret': SIRET_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def etablissements(self, request, siret, **kwargs): return self.get('etablissements/%s/' % siret, **kwargs) - @endpoint(perm='can_access', - methods=['get'], - pattern=r'(?P\w+)/$', - example_pattern='{siret}/', - description_get=_('Get firms\'s financial year informations'), - parameters={ - 'siret': SIRET_PARAM, - 'object': 
OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + perm='can_access', + methods=['get'], + pattern=r'(?P\w+)/$', + example_pattern='{siret}/', + description_get=_('Get firms\'s financial year informations'), + parameters={ + 'siret': SIRET_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def exercices(self, request, siret, **kwargs): return self.get('exercices/%s/' % siret, **kwargs) - @endpoint(perm='can_access', - pattern=r'(?P\w+)/$', - example_pattern='{siren}/', - description=_('Get firm\'s annual workforce data'), - parameters={ - 'siren': SIREN_PARAM, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + perm='can_access', + pattern=r'(?P\w+)/$', + example_pattern='{siren}/', + description=_('Get firm\'s annual workforce data'), + parameters={ + 'siren': SIREN_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def effectifs_annuels_acoss_covid(self, request, siren, **kwargs): if len(siren) != 9: raise APIError(_('invalid SIREN length (must be 9 characters)')) return self.get('effectifs_annuels_acoss_covid/%s/' % siren, **kwargs) - @endpoint(perm='can_access', - pattern=r'(?P\w+)/(?P\w+)/(?P\w+)/$', - description=_('Get firm\'s monthly workforce data, by SIREN'), - parameters={ - 'year': YEAR_PARAM, - 'month': MONTH_PARAM, - 'siren': SIREN_PARAM, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + perm='can_access', + pattern=r'(?P\w+)/(?P\w+)/(?P\w+)/$', + description=_('Get firm\'s monthly workforce data, by SIREN'), + parameters={ + 'year': YEAR_PARAM, + 'month': MONTH_PARAM, + 'siren': SIREN_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def entreprise_effectifs_mensuels_acoss_covid(self, request, year, month, siren, **kwargs): if len(siren) != 9: raise 
APIError(_('invalid SIREN length (must be 9 characters)')) @@ -369,17 +386,19 @@ class APIEntreprise(BaseResource): 'effectifs_mensuels_acoss_covid/%s/%s/entreprise/%s/' % (year, month, siren), **kwargs ) - @endpoint(perm='can_access', - pattern=r'(?P\w+)/(?P\w+)/(?P\w+)/$', - description=_('Get firm\'s monthly workforce data, by SIRET'), - parameters={ - 'year': YEAR_PARAM, - 'month': MONTH_PARAM, - 'siret': SIRET_PARAM, - 'object': OBJECT_PARAM, - 'context': CONTEXT_PARAM, - 'recipient': RECIPIENT_PARAM, - }) + @endpoint( + perm='can_access', + pattern=r'(?P\w+)/(?P\w+)/(?P\w+)/$', + description=_('Get firm\'s monthly workforce data, by SIRET'), + parameters={ + 'year': YEAR_PARAM, + 'month': MONTH_PARAM, + 'siret': SIRET_PARAM, + 'object': OBJECT_PARAM, + 'context': CONTEXT_PARAM, + 'recipient': RECIPIENT_PARAM, + }, + ) def etablissement_effectifs_mensuels_acoss_covid(self, request, year, month, siret, **kwargs): month = month.zfill(2) return self.get( diff --git a/passerelle/apps/api_particulier/known_errors.py b/passerelle/apps/api_particulier/known_errors.py index e456b38a..d2eb83e5 100644 --- a/passerelle/apps/api_particulier/known_errors.py +++ b/passerelle/apps/api_particulier/known_errors.py @@ -29,7 +29,7 @@ KNOWN_ERRORS = { 'Il existe au moins un enfant pour lequel il existe un droit sur le dossier et/ou à la période demandée', 'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée', 'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée (après date du jour)', - 'L’opérateurs téléphonique» ne propose pas de raccordement SMS avec un prestataire externe (raccordement avec un numéro court). ' + 'L’opérateurs téléphonique» ne propose pas de raccordement SMS avec un prestataire externe (raccordement avec un numéro court). ', }, 500: { 'Les informations souhaitées sont momentanément indisponibles. 
Merci de renouveler votre demande ultérieurement.', @@ -42,6 +42,6 @@ KNOWN_ERRORS = { "Votre demande n'a pu aboutir en raison d'un incident technique momentané. Merci de renouveler votre demande ultérieurement.", "Votre demande n'a pu aboutir en raison d'une erreur fonctionnelle lié à l'appel au service IMC.", "Votre demande n'a pu aboutir en raison d'une erreur technique lié à l'appel au service IMC.", - "Votre demande n’a pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. Merci de renouveler votre demande ultérieurement." - } + "Votre demande n’a pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. Merci de renouveler votre demande ultérieurement.", + }, } diff --git a/passerelle/apps/api_particulier/migrations/0001_initial.py b/passerelle/apps/api_particulier/migrations/0001_initial.py index c111fe00..8e5b5498 100644 --- a/passerelle/apps/api_particulier/migrations/0001_initial.py +++ b/passerelle/apps/api_particulier/migrations/0001_initial.py @@ -14,14 +14,50 @@ class Migration(migrations.Migration): migrations.CreateModel( name='APIParticulier', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('_platform', models.CharField(choices=[(b'prod', 'Production'), (b'test', 'Test')], max_length=8, verbose_name='Platform')), - ('_api_key', models.CharField(default=b'', max_length=64, 
verbose_name='API key', blank=True)), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_apiparticulier_users_+', related_query_name='+', blank=True)), + ( + 'log_level', + models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + '_platform', + models.CharField( + choices=[(b'prod', 'Production'), (b'test', 'Test')], + max_length=8, + verbose_name='Platform', + ), + ), + ( + '_api_key', + models.CharField(default=b'', max_length=64, verbose_name='API key', blank=True), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_apiparticulier_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'abstract': False, diff --git a/passerelle/apps/api_particulier/models.py b/passerelle/apps/api_particulier/models.py index 2b94c9fe..f53a8aa3 100644 --- a/passerelle/apps/api_particulier/models.py +++ b/passerelle/apps/api_particulier/models.py @@ -42,11 +42,7 @@ from .known_errors import KNOWN_ERRORS class APIParticulier(BaseResource): PLATFORMS = [ - { - 'name': 'prod', - 'label': _('Production'), - 'url': 'https://particulier.api.gouv.fr/api/' - }, + {'name': 'prod', 'label': _('Production'), 'url': 'https://particulier.api.gouv.fr/api/'}, { 'name': 'test', 'label': _('Test'), @@ -58,13 +54,10 @@ class APIParticulier(BaseResource): platform = models.CharField( verbose_name=_('Platform'), max_length=8, - choices=[(key, platform['label']) for key, platform in PLATFORMS.items()]) + choices=[(key, platform['label']) for key, platform in PLATFORMS.items()], + ) - api_key = models.CharField( - max_length=256, - default='', - blank=True, - verbose_name=_('API key')) + api_key = models.CharField(max_length=256, default='', blank=True, verbose_name=_('API key')) log_requests_errors = False @@ -79,28 +72,24 @@ 
class APIParticulier(BaseResource): if user: headers['X-User'] = user try: - response = self.requests.get( - url, - headers=headers, - timeout=5, - **kwargs) + response = self.requests.get(url, headers=headers, timeout=5, **kwargs) except requests.RequestException as e: raise APIError( - u'API-particulier platform "%s" connection error: %s' % - (self.platform, exception_to_text(e)), + u'API-particulier platform "%s" connection error: %s' % (self.platform, exception_to_text(e)), log_error=True, data={ 'code': 'connection-error', 'platform': self.platform, 'error': six.text_type(e), - }) + }, + ) try: data = response.json() except JSONDecodeError as e: content = repr(response.content[:1000]) raise APIError( - u'API-particulier platform "%s" returned non-JSON content with status %s: %s' % - (self.platform, response.status_code, content), + u'API-particulier platform "%s" returned non-JSON content with status %s: %s' + % (self.platform, response.status_code, content), log_error=True, data={ 'code': 'non-json', @@ -108,7 +97,8 @@ class APIParticulier(BaseResource): 'exception': six.text_type(e), 'platform': self.platform, 'content': content, - }) + }, + ) if response.status_code != 200: # avoid logging http errors about non-transport failure message = data.get('message', '') @@ -120,162 +110,180 @@ class APIParticulier(BaseResource): 'status_code': response.status_code, 'platform': self.platform, 'content': data, - }) + }, + ) raise APIError( - u'API-particulier platform "%s" returned a non 200 status %s: %s' % - (self.platform, response.status_code, data), + u'API-particulier platform "%s" returned a non 200 status %s: %s' + % (self.platform, response.status_code, data), log_error=True, data={ 'code': 'non-200', 'status_code': response.status_code, 'platform': self.platform, 'content': data, - }) + }, + ) return { 'err': 0, 'data': data, } - @endpoint(perm='can_access', - show=False, - description=_('Get citizen\'s fiscal informations'), - parameters={ - 
'numero_fiscal': { - 'description': _('fiscal identifier'), - 'example_value': '1562456789521', - }, - 'reference_avis': { - 'description': _('tax notice number'), - 'example_value': '1512456789521', - }, - 'user': { - 'description': _('requesting user'), - 'example_value': 'John Doe (agent)', - }, - }) + @endpoint( + perm='can_access', + show=False, + description=_('Get citizen\'s fiscal informations'), + parameters={ + 'numero_fiscal': { + 'description': _('fiscal identifier'), + 'example_value': '1562456789521', + }, + 'reference_avis': { + 'description': _('tax notice number'), + 'example_value': '1512456789521', + }, + 'user': { + 'description': _('requesting user'), + 'example_value': 'John Doe (agent)', + }, + }, + ) def impots_svair(self, request, numero_fiscal, reference_avis, user=None): # deprecated endpoint return self.v2_avis_imposition(request, numero_fiscal, reference_avis, user=user) - @endpoint(name='avis-imposition', - perm='can_access', - description=_('Get citizen\'s fiscal informations'), - parameters={ - 'numero_fiscal': { - 'description': _('fiscal identifier'), - 'example_value': '1562456789521', - }, - 'reference_avis': { - 'description': _('tax notice number'), - 'example_value': '1512456789521', - }, - 'user': { - 'description': _('requesting user'), - 'example_value': 'John Doe (agent)', - }, - }, - json_schema_response={ - 'type': 'object', - 'required': ['err'], - 'properties': { - 'err': {'enum': [0, 1]}, - 'declarant1': { - 'type': 'object', - 'properties': { - 'nom': {'type': 'string'}, - 'nomNaissance': {'type': 'string'}, - 'prenoms': {'type': 'string'}, - 'dateNaissance': {'type': 'string'} - }, - }, - 'declarant2': { - 'type': 'object', - 'properties': { - 'nom': {'type': 'string'}, - 'nomNaissance': {'type': 'string'}, - 'prenoms': {'type': 'string'}, - 'dateNaissance': {'type': 'string'} - } - }, - 'foyerFiscal': { - 'type': 'object', - 'properties': { - 'annee': {'type': 'integer'}, - 'adresse': {'type': 'string'}, - } - }, - 
'dateRecouvrement': {'type': 'string', 'pattern': r'^\d{1,2}/\d{1,2}/\d{4}$'}, - 'dateEtablissement': {'type': 'string', 'pattern': r'^\d{1,2}/\d{1,2}/\d{4}$'}, - 'nombreParts': {'type': 'integer'}, - 'situationFamille': {'type': 'string'}, - 'nombrePersonnesCharge': {'type': 'integer'}, - 'revenuBrutGlobal': {'type': 'integer'}, - 'revenuImposable': {'type': 'integer'}, - 'impotRevenuNetAvantCorrections': {'type': 'integer'}, - 'montantImpot': {'type': 'integer'}, - 'revenuFiscalReference': {'type': 'integer'}, - 'anneeImpots': {'type': 'string', 'pattern': r'^[0-9]{4}$'}, - 'anneeRevenus': {'type': 'string', 'pattern': r'^[0-9]{4}$'}, - 'erreurCorrectif': {'type': 'string'}, - 'situationPartielle': {'type': 'string'} - } - }) + @endpoint( + name='avis-imposition', + perm='can_access', + description=_('Get citizen\'s fiscal informations'), + parameters={ + 'numero_fiscal': { + 'description': _('fiscal identifier'), + 'example_value': '1562456789521', + }, + 'reference_avis': { + 'description': _('tax notice number'), + 'example_value': '1512456789521', + }, + 'user': { + 'description': _('requesting user'), + 'example_value': 'John Doe (agent)', + }, + }, + json_schema_response={ + 'type': 'object', + 'required': ['err'], + 'properties': { + 'err': {'enum': [0, 1]}, + 'declarant1': { + 'type': 'object', + 'properties': { + 'nom': {'type': 'string'}, + 'nomNaissance': {'type': 'string'}, + 'prenoms': {'type': 'string'}, + 'dateNaissance': {'type': 'string'}, + }, + }, + 'declarant2': { + 'type': 'object', + 'properties': { + 'nom': {'type': 'string'}, + 'nomNaissance': {'type': 'string'}, + 'prenoms': {'type': 'string'}, + 'dateNaissance': {'type': 'string'}, + }, + }, + 'foyerFiscal': { + 'type': 'object', + 'properties': { + 'annee': {'type': 'integer'}, + 'adresse': {'type': 'string'}, + }, + }, + 'dateRecouvrement': {'type': 'string', 'pattern': r'^\d{1,2}/\d{1,2}/\d{4}$'}, + 'dateEtablissement': {'type': 'string', 'pattern': r'^\d{1,2}/\d{1,2}/\d{4}$'}, + 
'nombreParts': {'type': 'integer'}, + 'situationFamille': {'type': 'string'}, + 'nombrePersonnesCharge': {'type': 'integer'}, + 'revenuBrutGlobal': {'type': 'integer'}, + 'revenuImposable': {'type': 'integer'}, + 'impotRevenuNetAvantCorrections': {'type': 'integer'}, + 'montantImpot': {'type': 'integer'}, + 'revenuFiscalReference': {'type': 'integer'}, + 'anneeImpots': {'type': 'string', 'pattern': r'^[0-9]{4}$'}, + 'anneeRevenus': {'type': 'string', 'pattern': r'^[0-9]{4}$'}, + 'erreurCorrectif': {'type': 'string'}, + 'situationPartielle': {'type': 'string'}, + }, + }, + ) def v2_avis_imposition(self, request, numero_fiscal, reference_avis, user=None): numero_fiscal = numero_fiscal.strip()[:13] reference_avis = reference_avis.strip()[:13] if len(numero_fiscal) < 13 or len(reference_avis) < 13: raise APIError('bad numero_fiscal or reference_avis, must be 13 chars long', status_code=400) - return self.get('v2/avis-imposition', params={ - 'numeroFiscal': numero_fiscal, - 'referenceAvis': reference_avis, - }, user=user) + return self.get( + 'v2/avis-imposition', + params={ + 'numeroFiscal': numero_fiscal, + 'referenceAvis': reference_avis, + }, + user=user, + ) - @endpoint(perm='can_access', - show=False, - description=_('Get family allowances recipient informations'), - parameters={ - 'code_postal': { - 'description': _('postal code'), - 'example_value': '99148', - }, - 'numero_allocataire': { - 'description': _('recipient identifier'), - 'example_value': '0000354', - }, - 'user': { - 'description': _('requesting user'), - 'example_value': 'John Doe (agent)', - }, - }) + @endpoint( + perm='can_access', + show=False, + description=_('Get family allowances recipient informations'), + parameters={ + 'code_postal': { + 'description': _('postal code'), + 'example_value': '99148', + }, + 'numero_allocataire': { + 'description': _('recipient identifier'), + 'example_value': '0000354', + }, + 'user': { + 'description': _('requesting user'), + 'example_value': 'John Doe 
(agent)', + }, + }, + ) def caf_famille(self, request, code_postal, numero_allocataire, user=None): # deprecated endpoint return self.v2_situation_familiale(request, code_postal, numero_allocataire, user=user) - @endpoint(name='situation-familiale', - perm='can_access', - description=_('Get family allowances recipient informations'), - parameters={ - 'code_postal': { - 'description': _('postal code'), - 'example_value': '99148', - }, - 'numero_allocataire': { - 'description': _('recipient identifier'), - 'example_value': '0000354', - }, - 'user': { - 'description': _('requesting user'), - 'example_value': 'John Doe (agent)', - }, - }) + @endpoint( + name='situation-familiale', + perm='can_access', + description=_('Get family allowances recipient informations'), + parameters={ + 'code_postal': { + 'description': _('postal code'), + 'example_value': '99148', + }, + 'numero_allocataire': { + 'description': _('recipient identifier'), + 'example_value': '0000354', + }, + 'user': { + 'description': _('requesting user'), + 'example_value': 'John Doe (agent)', + }, + }, + ) def v2_situation_familiale(self, request, code_postal, numero_allocataire, user=None): if not code_postal.strip() or not numero_allocataire.strip(): raise APIError('missing code_postal or numero_allocataire', status_code=400) - return self.get('v2/composition-familiale', params={ - 'codePostal': code_postal, - 'numeroAllocataire': numero_allocataire, - }, user=user) + return self.get( + 'v2/composition-familiale', + params={ + 'codePostal': code_postal, + 'numeroAllocataire': numero_allocataire, + }, + user=user, + ) category = _('Business Process Connectors') diff --git a/passerelle/apps/arcgis/migrations/0001_initial.py b/passerelle/apps/arcgis/migrations/0001_initial.py index 6eabe730..3e18f955 100644 --- a/passerelle/apps/arcgis/migrations/0001_initial.py +++ b/passerelle/apps/arcgis/migrations/0001_initial.py @@ -14,13 +14,36 @@ class Migration(migrations.Migration): migrations.CreateModel( 
name='Arcgis', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('base_url', models.CharField(max_length=256, verbose_name='SIG Url')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_arcgis_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_arcgis_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'Arcgis Webservice', diff --git a/passerelle/apps/arcgis/migrations/0003_auto_20181102_1550.py b/passerelle/apps/arcgis/migrations/0003_auto_20181102_1550.py index ad3a8ab6..e1c03ded 100644 --- a/passerelle/apps/arcgis/migrations/0003_auto_20181102_1550.py +++ b/passerelle/apps/arcgis/migrations/0003_auto_20181102_1550.py @@ -29,7 +29,9 @@ class Migration(migrations.Migration): migrations.AddField( model_name='arcgis', name='client_certificate', - field=models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate'), + field=models.FileField( + blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate' + ), ), 
migrations.AddField( model_name='arcgis', @@ -54,6 +56,18 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='arcgis', name='log_level', - field=models.CharField(choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')], default=b'INFO', max_length=10, verbose_name='Log Level'), + field=models.CharField( + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + default=b'INFO', + max_length=10, + verbose_name='Log Level', + ), ), ] diff --git a/passerelle/apps/arcgis/migrations/0005_auto_20200310_1517.py b/passerelle/apps/arcgis/migrations/0005_auto_20200310_1517.py index e2991110..1e63e049 100644 --- a/passerelle/apps/arcgis/migrations/0005_auto_20200310_1517.py +++ b/passerelle/apps/arcgis/migrations/0005_auto_20200310_1517.py @@ -18,17 +18,51 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Query', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name', models.CharField(max_length=128, verbose_name='Name')), ('slug', models.SlugField(max_length=128, verbose_name='Slug')), ('description', models.TextField(blank=True, verbose_name='Description')), ('folder', models.CharField(blank=True, max_length=64, verbose_name='ArcGis Folder')), ('service', models.CharField(max_length=64, verbose_name='ArcGis Service')), ('layer', models.CharField(blank=True, max_length=8, verbose_name='ArcGis Layer')), - ('where', models.TextField(blank=True, help_text="Use syntax {name} to introduce a string parameter and {name:d} for a decimal parameter. ex.:
adress LIKE ('%' || UPPER({adress}) || '%')
population < {population:d}
", validators=[passerelle.apps.arcgis.models.validate_where], verbose_name='ArcGis Where Clause')), - ('id_template', models.TextField(blank=True, help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}", validators=[passerelle.utils.templates.validate_template], verbose_name='Id template')), - ('text_template', models.TextField(blank=True, help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}", validators=[passerelle.utils.templates.validate_template], verbose_name='Text template')), - ('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='arcgis.ArcGIS', verbose_name='Resource')), + ( + 'where', + models.TextField( + blank=True, + help_text="Use syntax {name} to introduce a string parameter and {name:d} for a decimal parameter. ex.:
adress LIKE ('%' || UPPER({adress}) || '%')
population < {population:d}
", + validators=[passerelle.apps.arcgis.models.validate_where], + verbose_name='ArcGis Where Clause', + ), + ), + ( + 'id_template', + models.TextField( + blank=True, + help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}", + validators=[passerelle.utils.templates.validate_template], + verbose_name='Id template', + ), + ), + ( + 'text_template', + models.TextField( + blank=True, + help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}", + validators=[passerelle.utils.templates.validate_template], + verbose_name='Text template', + ), + ), + ( + 'resource', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to='arcgis.ArcGIS', + verbose_name='Resource', + ), + ), ], options={ 'ordering': ['name'], diff --git a/passerelle/apps/arcgis/migrations/0006_auto_20200401_1025.py b/passerelle/apps/arcgis/migrations/0006_auto_20200401_1025.py index 181791b4..e5379a82 100644 --- a/passerelle/apps/arcgis/migrations/0006_auto_20200401_1025.py +++ b/passerelle/apps/arcgis/migrations/0006_auto_20200401_1025.py @@ -16,7 +16,9 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='arcgis', name='client_certificate', - field=models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS client certificate'), + field=models.FileField( + blank=True, null=True, upload_to='', verbose_name='TLS client certificate' + ), ), migrations.AlterField( model_name='arcgis', @@ -26,6 +28,11 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='query', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='queries', to='arcgis.ArcGIS', verbose_name='Resource'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='queries', + to='arcgis.ArcGIS', + verbose_name='Resource', + ), ), ] diff --git a/passerelle/apps/arcgis/models.py b/passerelle/apps/arcgis/models.py 
index e861edb8..0b7fbb31 100644 --- a/passerelle/apps/arcgis/models.py +++ b/passerelle/apps/arcgis/models.py @@ -46,44 +46,61 @@ class ArcGIS(BaseResource, HTTPResource): class Meta: verbose_name = _('ArcGIS REST API') - @endpoint(name='mapservice-query', - description=_('Map Service Query'), - perm='can_access', - parameters={ - 'folder': { - 'description': _('Folder name'), - 'example_value': 'Specialty', - }, - 'service': { - 'description': _('Service name'), - 'example_value': 'ESRI_StateCityHighway_USA', - }, - 'layer': { - 'description': _('Layer or table name'), - 'example_value': '1', - }, - 'lat': {'description': _('Latitude')}, - 'lon': {'description': _('Longitude')}, - 'latmin': {'description': _('Minimal latitude (envelope)')}, - 'lonmin': {'description': _('Minimal longitude (envelope)')}, - 'latmax': {'description': _('Maximal latitude (envelope)')}, - 'lonmax': {'description': _('Maximal longitude (envelope)')}, - 'q': {'description': _('Search text in display field')}, - 'template': { - 'description': _('Django template for text attribute'), - 'example_value': '{{ attributes.STATE_NAME }} ({{ attributes.STATE_ABBR }})', - }, - 'id_template': { - 'description': _('Django template for id attribute'), - }, - 'full': { - 'description': _('Returns all ArcGIS informations (geometry, metadata)'), - 'type': 'bool', - }, - }) - def mapservice_query(self, request, service, layer='0', folder='', lat=None, lon=None, - latmin=None, lonmin=None, latmax=None, lonmax=None, q=None, - template=None, id_template=None, full=False, **kwargs): + @endpoint( + name='mapservice-query', + description=_('Map Service Query'), + perm='can_access', + parameters={ + 'folder': { + 'description': _('Folder name'), + 'example_value': 'Specialty', + }, + 'service': { + 'description': _('Service name'), + 'example_value': 'ESRI_StateCityHighway_USA', + }, + 'layer': { + 'description': _('Layer or table name'), + 'example_value': '1', + }, + 'lat': {'description': _('Latitude')}, + 
'lon': {'description': _('Longitude')}, + 'latmin': {'description': _('Minimal latitude (envelope)')}, + 'lonmin': {'description': _('Minimal longitude (envelope)')}, + 'latmax': {'description': _('Maximal latitude (envelope)')}, + 'lonmax': {'description': _('Maximal longitude (envelope)')}, + 'q': {'description': _('Search text in display field')}, + 'template': { + 'description': _('Django template for text attribute'), + 'example_value': '{{ attributes.STATE_NAME }} ({{ attributes.STATE_ABBR }})', + }, + 'id_template': { + 'description': _('Django template for id attribute'), + }, + 'full': { + 'description': _('Returns all ArcGIS informations (geometry, metadata)'), + 'type': 'bool', + }, + }, + ) + def mapservice_query( + self, + request, + service, + layer='0', + folder='', + lat=None, + lon=None, + latmin=None, + lonmin=None, + latmax=None, + lonmax=None, + q=None, + template=None, + id_template=None, + full=False, + **kwargs, + ): url = urlparse.urljoin(self.base_url, 'services/') if folder: url = urlparse.urljoin(url, folder + '/') @@ -109,8 +126,7 @@ class ArcGIS(BaseResource, HTTPResource): lonmin, latmin = float(lonmin), float(latmin) lonmax, latmax = float(lonmax), float(latmax) except (ValueError,): - raise APIError(' and must be floats', - http_status=400) + raise APIError(' and must be floats', http_status=400) params['geometry'] = '{},{},{},{}'.format(lonmin, latmin, lonmax, latmax) params['geometryType'] = 'esriGeometryEnvelope' if q is not None: @@ -156,7 +172,7 @@ class ArcGIS(BaseResource, HTTPResource): feature['id'] = '%s' % get_feature_attribute(feature, id_fieldname) feature['text'] = '%s' % get_feature_attribute(feature, text_fieldname) else: - feature['id'] = feature['text'] = '%d' % (n+1) + feature['id'] = feature['text'] = '%d' % (n + 1) if template: feature['text'] = render_to_string(template, feature) if id_template: @@ -169,22 +185,30 @@ class ArcGIS(BaseResource, HTTPResource): return {'data': data, 'metadata': infos} return 
{'data': data} - @endpoint(name='district', - description=_('Districts in Nancy Town'), - parameters={ - 'lat': {'description': _('Latitude')}, - 'lon': {'description': _('Longitude')}, - }, - show=False) + @endpoint( + name='district', + description=_('Districts in Nancy Town'), + parameters={ + 'lat': {'description': _('Latitude')}, + 'lon': {'description': _('Longitude')}, + }, + show=False, + ) def district(self, request, lon=None, lat=None): # deprecated endpoint if 'NANCY_Grc' in self.base_url: # Nancy URL used to contains folder, service and layer, remove them self.base_url = 'https://geoservices.grand-nancy.org/arcgis/rest/' - features = self.mapservice_query(request, folder='public', service='NANCY_Grc', layer='0', - template='{{ attributes.NOM }}', - id_template='{{ attributes.NUMERO }}', - lon=lon, lat=lat)['data'] + features = self.mapservice_query( + request, + folder='public', + service='NANCY_Grc', + layer='0', + template='{{ attributes.NOM }}', + id_template='{{ attributes.NUMERO }}', + lon=lon, + lat=lat, + )['data'] if not features: raise APIError('No features found.') for feature in features: @@ -197,15 +221,14 @@ class ArcGIS(BaseResource, HTTPResource): @endpoint( name='tile', description=_('Tiles layer'), - pattern=r'^(?P[\w/]+)/(?P\d+)/(?P\d+)/(?P\d+)\.png$') + pattern=r'^(?P[\w/]+)/(?P\d+)/(?P\d+)/(?P\d+)\.png$', + ) def tile(self, request, layer, zoom, tile_x, tile_y): zoom = int(zoom) tile_x = int(tile_x) tile_y = int(tile_y) - bbox = '%.6f,%.6f,%.6f,%.6f' % ( - num2deg(tile_x, tile_y, zoom) + - num2deg(tile_x+1, tile_y+1, zoom)) + bbox = '%.6f,%.6f,%.6f,%.6f' % (num2deg(tile_x, tile_y, zoom) + num2deg(tile_x + 1, tile_y + 1, zoom)) # imageSR=3857: default projection for leaflet base_url = self.base_url @@ -213,19 +236,22 @@ class ArcGIS(BaseResource, HTTPResource): base_url += '/' return HttpResponse( self.requests.get( - base_url + - '%s/MapServer/export' % layer + - '?dpi=96&format=png24&bboxSR=4326&imageSR=3857&' + - 
'transparent=true&size=256,256&f=image&' + - 'bbox=%s' % bbox + base_url + + '%s/MapServer/export' % layer + + '?dpi=96&format=png24&bboxSR=4326&imageSR=3857&' + + 'transparent=true&size=256,256&f=image&' + + 'bbox=%s' % bbox ).content, - content_type='image/png') + content_type='image/png', + ) - @endpoint(name='q', - description=_('Query'), - pattern=r'^(?P[\w:_-]+)/$', - perm='can_access', - show=False) + @endpoint( + name='q', + description=_('Query'), + pattern=r'^(?P[\w:_-]+)/$', + perm='can_access', + show=False, + ) def q(self, request, query_slug, q=None, full=False, **kwargs): query = get_object_or_404(Query, resource=self, slug=query_slug) refs = [ref for ref, _ in query.where_references] @@ -282,22 +308,12 @@ def validate_where(format_string): class Query(BaseQuery): resource = models.ForeignKey( - to=ArcGIS, - related_name='queries', - verbose_name=_('Resource'), - on_delete=models.CASCADE) + to=ArcGIS, related_name='queries', verbose_name=_('Resource'), on_delete=models.CASCADE + ) - folder = models.CharField( - verbose_name=_('ArcGis Folder'), - max_length=64, - blank=True) - service = models.CharField( - verbose_name=_('ArcGis Service'), - max_length=64) - layer = models.CharField( - verbose_name=_('ArcGis Layer'), - max_length=8, - blank=True) + folder = models.CharField(verbose_name=_('ArcGis Folder'), max_length=64, blank=True) + service = models.CharField(verbose_name=_('ArcGis Service'), max_length=64) + layer = models.CharField(verbose_name=_('ArcGis Layer'), max_length=8, blank=True) where = models.TextField( verbose_name=_('ArcGis Where Clause'), @@ -308,19 +324,28 @@ class Query(BaseQuery): 'Use syntax {name} to introduce a string ' 'parameter and {name:d} for a decimal parameter. ex.:
' 'adress LIKE (\'%\' || UPPER({adress}) || \'%\')
' - 'population < {population:d}
'))) + 'population < {population:d}' + ) + ), + ) id_template = models.TextField( verbose_name=_('Id template'), validators=[validate_template], - help_text=_('Use Django\'s template syntax. Attributes can be accessed through {{ attributes.name }}'), - blank=True) + help_text=_( + 'Use Django\'s template syntax. Attributes can be accessed through {{ attributes.name }}' + ), + blank=True, + ) text_template = models.TextField( verbose_name=_('Text template'), - help_text=_('Use Django\'s template syntax. Attributes can be accessed through {{ attributes.name }}'), + help_text=_( + 'Use Django\'s template syntax. Attributes can be accessed through {{ attributes.name }}' + ), validators=[validate_template], - blank=True) + blank=True, + ) delete_view = 'arcgis-query-delete' edit_view = 'arcgis-query-edit' @@ -328,15 +353,20 @@ class Query(BaseQuery): @property def where_references(self): if self.where: - return [(ref, int if spec and spec[-1] == 'd' else str) - for _, ref, spec, _ in SqlFormatter().parse(self.where) if ref is not None] + return [ + (ref, int if spec and spec[-1] == 'd' else str) + for _, ref, spec, _ in SqlFormatter().parse(self.where) + if ref is not None + ] else: return [] def q(self, request, q=None, full=False, **kwargs): - kwargs.update({ - 'service': self.service, - }) + kwargs.update( + { + 'service': self.service, + } + ) if self.id_template: kwargs['id_template'] = self.id_template if self.text_template: diff --git a/passerelle/apps/arcgis/urls.py b/passerelle/apps/arcgis/urls.py index a71be3e7..fc9c0e50 100644 --- a/passerelle/apps/arcgis/urls.py +++ b/passerelle/apps/arcgis/urls.py @@ -19,10 +19,11 @@ from django.conf.urls import url from . 
import views management_urlpatterns = [ - url(r'^(?P[\w,-]+)/query/new/$', - views.QueryNew.as_view(), name='arcgis-query-new'), - url(r'^(?P[\w,-]+)/query/(?P\d+)/$', - views.QueryEdit.as_view(), name='arcgis-query-edit'), - url(r'^(?P[\w,-]+)/query/(?P\d+)/delete/$', - views.QueryDelete.as_view(), name='arcgis-query-delete'), + url(r'^(?P[\w,-]+)/query/new/$', views.QueryNew.as_view(), name='arcgis-query-new'), + url(r'^(?P[\w,-]+)/query/(?P\d+)/$', views.QueryEdit.as_view(), name='arcgis-query-edit'), + url( + r'^(?P[\w,-]+)/query/(?P\d+)/delete/$', + views.QueryDelete.as_view(), + name='arcgis-query-delete', + ), ] diff --git a/passerelle/apps/arpege_ecp/migrations/0001_initial.py b/passerelle/apps/arpege_ecp/migrations/0001_initial.py index 0d5b15af..54bb0189 100644 --- a/passerelle/apps/arpege_ecp/migrations/0001_initial.py +++ b/passerelle/apps/arpege_ecp/migrations/0001_initial.py @@ -14,15 +14,41 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ArpegeECP', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('webservice_base_url', 
models.URLField(verbose_name='Webservice Base URL')), ('hawk_auth_id', models.CharField(max_length=64, verbose_name='Hawk Authentication id')), ('hawk_auth_key', models.CharField(max_length=64, verbose_name='Hawk Authentication secret')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_arpegeecp_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_arpegeecp_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Arpege ECP', diff --git a/passerelle/apps/arpege_ecp/models.py b/passerelle/apps/arpege_ecp/models.py index b20ff70c..6b396626 100644 --- a/passerelle/apps/arpege_ecp/models.py +++ b/passerelle/apps/arpege_ecp/models.py @@ -56,8 +56,9 @@ class ArpegeECP(BaseResource): def get_access_token(self, NameID): url = urlparse.urljoin(self.webservice_base_url, 'LoginParSubOIDC') try: - response = self.requests.post(url, auth=HawkAuth(self.hawk_auth_id, self.hawk_auth_key), - json={'subOIDC': NameID}) + response = self.requests.post( + url, auth=HawkAuth(self.hawk_auth_id, self.hawk_auth_key), json={'subOIDC': NameID} + ) response.raise_for_status() except RequestException as e: raise APIError(u'Arpege server is down: %s' % e) @@ -73,7 +74,12 @@ class ArpegeECP(BaseResource): return result['Data']['AccessToken'] raise APIError(u'%s (%s)' % (result.get('LibErreur'), result.get('CodErreur'))) - @endpoint(name='api', pattern='^users/(?P\w+)/forms$', perm='can_access', description='Returns user forms') + @endpoint( + name='api', + pattern='^users/(?P\w+)/forms$', + perm='can_access', + description='Returns user forms', + ) def get_user_forms(self, request, nameid): access_token = self.get_access_token(nameid) url = urlparse.urljoin(self.webservice_base_url, 'DemandesUsager') @@ -98,14 +104,15 @@ class ArpegeECP(BaseResource): receipt_date = parse_date(data_administratives['date_depot']) except (KeyError, TypeError) as e: raise APIError(u'Arpege 
error: %s %r' % (e, json.dumps(demand)[:1000])) - d = {'url': demand['url'], - 'title': data_administratives.get('LibelleQualificationTypeDemande'), - 'name': data_administratives.get('LibelleQualificationTypeDemande'), - 'status': data_administratives.get('libelle_etat'), - 'form_receipt_time': receipt_time, - 'readable': True, - 'form_receipt_datetime': timezone.datetime.combine(receipt_date, receipt_time), - 'form_status_is_endpoint': data_administratives.get('date_fin_instruction') is not None, + d = { + 'url': demand['url'], + 'title': data_administratives.get('LibelleQualificationTypeDemande'), + 'name': data_administratives.get('LibelleQualificationTypeDemande'), + 'status': data_administratives.get('libelle_etat'), + 'form_receipt_time': receipt_time, + 'readable': True, + 'form_receipt_datetime': timezone.datetime.combine(receipt_date, receipt_time), + 'form_status_is_endpoint': data_administratives.get('date_fin_instruction') is not None, } data.append(d) return {'data': data} diff --git a/passerelle/apps/astregs/migrations/0001_initial.py b/passerelle/apps/astregs/migrations/0001_initial.py index 6848e27b..03c96b9e 100644 --- a/passerelle/apps/astregs/migrations/0001_initial.py +++ b/passerelle/apps/astregs/migrations/0001_initial.py @@ -18,7 +18,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='AstreGS', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), @@ -28,7 +31,12 @@ class Migration(migrations.Migration): ('organism', models.CharField(max_length=32, verbose_name='Organisme')), ('budget', models.CharField(max_length=32, verbose_name='Budget')), ('exercice', 
models.CharField(max_length=32, verbose_name='Exercice')), - ('users', models.ManyToManyField(blank=True, related_name='_astregs_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_astregs_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'AstresGS', @@ -37,11 +45,17 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Link', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name_id', models.CharField(max_length=32)), ('association_id', models.CharField(max_length=32)), ('created', models.DateTimeField(auto_now_add=True)), - ('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='astregs.AstreGS')), + ( + 'resource', + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='astregs.AstreGS'), + ), ], ), migrations.AlterUniqueTogether( diff --git a/passerelle/apps/astregs/models.py b/passerelle/apps/astregs/models.py index 0e9bfd85..62f38501 100644 --- a/passerelle/apps/astregs/models.py +++ b/passerelle/apps/astregs/models.py @@ -46,18 +46,35 @@ ASSOCIATION_SCHEMA = { ], "properties": { "Financier": {"description": "financial association", "type": "string", "enum": ["true", "false"]}, - "CodeFamille": {"description": "association family code", "type": "string",}, - "CatTiers": {"description": "association category", "type": "string",}, - "NomEnregistrement": {"description": "association name", "type": "string",}, + "CodeFamille": { + "description": "association family code", + "type": "string", + }, + "CatTiers": { + "description": "association category", + "type": "string", + }, + "NomEnregistrement": { + "description": "association name", + "type": "string", + }, "StatutTiers": { "description": "association status", "type": 
"string", "enum": ["PROPOSE", "VALIDE", "REFUSE", "BLOQUE", "A COMPLETER"], }, "Type": {"description": "association type", "type": "string", "enum": ["D", "F", "*"]}, - "NumeroSiret": {"description": "SIREN number", "type": "string",}, - "NumeroSiretFin": {"description": "NIC number", "type": "string",}, - "AdresseTitre": {"type": "string",}, + "NumeroSiret": { + "description": "SIREN number", + "type": "string", + }, + "NumeroSiretFin": { + "description": "NIC number", + "type": "string", + }, + "AdresseTitre": { + "type": "string", + }, "AdresseIsAdresseDeCommande": {"type": "string", "enum": ["true", "false"]}, "AdresseIsAdresseDeFacturation": {"type": "string", "enum": ["true", "false"]}, }, @@ -78,13 +95,27 @@ CONTACT_SCHEMA = { "EncodeKeyStatut", ], "properties": { - "CodeContact": {"type": "string",}, - "CodeTitreCivilite": {"type": "string",}, - "Nom": {"type": "string",}, - "AdresseDestinataire": {"type": "string",}, - "CodePostal": {"type": "string",}, - "Ville": {"type": "string",}, - "EncodeKeyStatut": {"type": "string",}, + "CodeContact": { + "type": "string", + }, + "CodeTitreCivilite": { + "type": "string", + }, + "Nom": { + "type": "string", + }, + "AdresseDestinataire": { + "type": "string", + }, + "CodePostal": { + "type": "string", + }, + "Ville": { + "type": "string", + }, + "EncodeKeyStatut": { + "type": "string", + }, }, } @@ -105,21 +136,43 @@ DOCUMENT_SCHEMA = { "document", ], "properties": { - "Sujet": {"type": "string",}, - "Entite": {"type": "string",}, - "CodType": {"type": "string",}, - "Type": {"type": "string",}, - "hdnCodeTrt": {"type": "string",}, - "EncodeKeyEntite": {"type": "string",}, - "CodeDomaine": {"type": "string",}, - "CodDom": {"type": "string",}, + "Sujet": { + "type": "string", + }, + "Entite": { + "type": "string", + }, + "CodType": { + "type": "string", + }, + "Type": { + "type": "string", + }, + "hdnCodeTrt": { + "type": "string", + }, + "EncodeKeyEntite": { + "type": "string", + }, + "CodeDomaine": { + "type": 
"string", + }, + "CodDom": { + "type": "string", + }, "document": { "type": "object", "required": ['filename', 'content_type', 'content'], 'properties': { - 'filename': {'type': 'string',}, - 'content_type': {'type': 'string',}, - 'content': {'type': 'string',}, + 'filename': { + 'type': 'string', + }, + 'content_type': { + 'type': 'string', + }, + 'content': { + 'type': 'string', + }, }, }, }, @@ -141,14 +194,28 @@ GRANT_SCHEMA = { "CodeServiceUtilisateur", ], "properties": { - "Libelle": {"type": "string",}, - "LibelleCourt": {"type": "string",}, + "Libelle": { + "type": "string", + }, + "LibelleCourt": { + "type": "string", + }, "ModGestion": {"type": "string", "enum": ["1", "2", "3", "4"]}, - "TypeAide": {"type": "string",}, - "Sens": {"type": "string",}, - "CodeTiersDem": {"type": "string",}, - "CodeServiceGestionnaire": {"type": "string",}, - "CodeServiceUtilisateur": {"type": "string",}, + "TypeAide": { + "type": "string", + }, + "Sens": { + "type": "string", + }, + "CodeTiersDem": { + "type": "string", + }, + "CodeServiceGestionnaire": { + "type": "string", + }, + "CodeServiceUtilisateur": { + "type": "string", + }, }, } @@ -159,11 +226,21 @@ INDANA_SCHEMA = { "type": "object", "required": ["CodeDossier", "CodeInd_1", "AnneeInd_1", "ValInd_1"], "properties": { - "CodeDossier": {"type": "string",}, - "CodeInd_1": {"type": "string",}, - "AnneeInd_1": {"type": "string",}, - "ValInd_1": {"type": "string",}, - "IndAide": {"type": "string",}, + "CodeDossier": { + "type": "string", + }, + "CodeInd_1": { + "type": "string", + }, + "AnneeInd_1": { + "type": "string", + }, + "ValInd_1": { + "type": "string", + }, + "IndAide": { + "type": "string", + }, }, } @@ -174,9 +251,15 @@ INDANA_KEY_SCHEMA = { "type": "object", "required": ["CodeDossier", "CodeInd_1", "AnneeInd_1"], "properties": { - "CodeDossier": {"type": "string",}, - "CodeInd_1": {"type": "string",}, - "AnneeInd_1": {"type": "string",}, + "CodeDossier": { + "type": "string", + }, + "CodeInd_1": { + "type": 
"string", + }, + "AnneeInd_1": { + "type": "string", + }, }, } @@ -197,51 +280,26 @@ TIERS_RIB_SCHEMA = { "CodeStatut", "CodeDevise", "CodeIso2Pays", - "LibelleCompteEtranger" + "LibelleCompteEtranger", ], "properties": { - "CodeDevise": { - "type": "string" - }, - "CodeDomiciliation": { - "type": "string" - }, - "CodeIso2Pays": { - "type": "string" - }, - "CodePaiement": { - "type": "string" - }, + "CodeDevise": {"type": "string"}, + "CodeDomiciliation": {"type": "string"}, + "CodeIso2Pays": {"type": "string"}, + "CodePaiement": {"type": "string"}, "CodeStatut": { "type": "string", - "enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", - "BLOQUE", "EN MODIFICATION"] + "enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"], }, - "CodeTiers": { - "type": "string" - }, - "IndicateurRibDefaut": { - "type": "string" - }, - "LibelleCompteEtranger": { - "type": "string" - }, - "LibelleCourt": { - "type": "string" - }, - "NumeroIban": { - "type": "string" - }, - "CleIban": { - "type": "string" - }, - "CodeBic": { - "type": "string" - }, - "IdRib": { - "type": "string" - } - } + "CodeTiers": {"type": "string"}, + "IndicateurRibDefaut": {"type": "string"}, + "LibelleCompteEtranger": {"type": "string"}, + "LibelleCourt": {"type": "string"}, + "NumeroIban": {"type": "string"}, + "CleIban": {"type": "string"}, + "CodeBic": {"type": "string"}, + "IdRib": {"type": "string"}, + }, } TIERS_RIB_UPDATE_SCHEMA = { @@ -259,45 +317,24 @@ TIERS_RIB_UPDATE_SCHEMA = { "CodeStatut", "CodeDevise", "CodeIso2Pays", - "LibelleCompteEtranger" + "LibelleCompteEtranger", ], "properties": { - "CodeDevise": { - "type": "string" - }, - "CodeDomiciliation": { - "type": "string" - }, - "CodeIso2Pays": { - "type": "string" - }, - "CodePaiement": { - "type": "string" - }, + "CodeDevise": {"type": "string"}, + "CodeDomiciliation": {"type": "string"}, + "CodeIso2Pays": {"type": "string"}, + "CodePaiement": {"type": "string"}, "CodeStatut": { "type": "string", - "enum": 
["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", - "BLOQUE", "EN MODIFICATION"] + "enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"], }, - "IndicateurRibDefaut": { - "type": "string" - }, - "LibelleCompteEtranger": { - "type": "string" - }, - "LibelleCourt": { - "type": "string" - }, - "NumeroIban": { - "type": "string" - }, - "CleIban": { - "type": "string" - }, - "CodeBic": { - "type": "string" - } - } + "IndicateurRibDefaut": {"type": "string"}, + "LibelleCompteEtranger": {"type": "string"}, + "LibelleCourt": {"type": "string"}, + "NumeroIban": {"type": "string"}, + "CleIban": {"type": "string"}, + "CodeBic": {"type": "string"}, + }, } @@ -434,8 +471,14 @@ class AstreGS(BaseResource): description=_('Create link between user and association'), perm='can_access', parameters={ - 'NameID': {'description': _('Publik NameID'), 'example_value': 'xyz24d934',}, - 'association_id': {'description': _('Association ID'), 'example_value': '12345',}, + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + }, + 'association_id': { + 'description': _('Association ID'), + 'example_value': '12345', + }, }, ) def link(self, request, NameID, association_id): @@ -464,7 +507,12 @@ class AstreGS(BaseResource): @endpoint( description=_('List user links'), perm='can_access', - parameters={'NameID': {'description': _('Publik NameID'), 'example_value': 'xyz24d934',}}, + parameters={ + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + } + }, ) def links(self, request, NameID): if not Link.objects.filter(resource=self, name_id=NameID).exists(): @@ -508,7 +556,12 @@ class AstreGS(BaseResource): name='get-contact', perm='can_access', description=_('Get contact details'), - parameters={'contact_id': {'description': _('Contact identifier'), 'example_value': '1111',}}, + parameters={ + 'contact_id': { + 'description': _('Contact identifier'), + 'example_value': '1111', + } + }, ) def get_contact(self, 
request, contact_id): r = self.call('Contact', 'Chargement', ContactCle={'idContact': contact_id}) @@ -533,7 +586,9 @@ class AstreGS(BaseResource): description=_('Delete contact'), name='delete-contact', perm='can_access', - parameters={'contact_id': {'description': _('Contact ID'), 'example_value': '4242'},}, + parameters={ + 'contact_id': {'description': _('Contact ID'), 'example_value': '4242'}, + }, ) def delete_contact(self, request, contact_id): r = self.call('Contact', 'Suppression', ContactCle={'idContact': contact_id}) @@ -615,31 +670,31 @@ class AstreGS(BaseResource): r = self.call('TiersRib', 'Creation', TiersRib=post_data) return {'data': serialize_object(r)} - @endpoint( - name='get-tiers-rib', perm='can_access', + name='get-tiers-rib', + perm='can_access', description=_('Get RIB'), parameters={ 'CodeTiers': {'example_value': '42435'}, 'IdRib': {'example_value': '4242'}, - } + }, ) def get_tiers_rib(self, request, CodeTiers, IdRib): payload = {'CodeTiers': CodeTiers, 'IdRib': IdRib} r = self.call('TiersRib', 'Chargement', TiersRibCle=payload) return {'data': serialize_object(r)} - @endpoint( - name='update-tiers-rib', perm='can_access', + name='update-tiers-rib', + perm='can_access', post={ 'description': _('Update RIB'), - 'request_body': {'schema': {'application/json': TIERS_RIB_UPDATE_SCHEMA}} + 'request_body': {'schema': {'application/json': TIERS_RIB_UPDATE_SCHEMA}}, }, parameters={ 'CodeTiers': {'example_value': '42435'}, 'IdRib': {'example_value': '4242'}, - } + }, ) def update_tiers_rib(self, request, CodeTiers, IdRib, post_data): post_data['CodeTiers'] = CodeTiers @@ -647,32 +702,33 @@ class AstreGS(BaseResource): r = self.call('TiersRib', 'Modification', TiersRib=post_data) return {'data': serialize_object(r)} - - @endpoint(name='delete-tiers-rib', perm='can_access', + @endpoint( + name='delete-tiers-rib', + perm='can_access', description=_('Delete RIB'), parameters={ 'CodeTiers': {'example_value': '42435'}, 'IdRib': {'example_value': 
'4242'}, - } + }, ) def delete_tiers_rib(self, request, CodeTiers, IdRib): payload = {'CodeTiers': CodeTiers, 'IdRib': IdRib} r = self.call('TiersRib', 'Suppression', TiersRibCle=payload) return {'data': serialize_object(r)} - @endpoint(name='find-tiers-by-rib', perm='can_access', + @endpoint( + name='find-tiers-by-rib', + perm='can_access', description=_('Find person by RIB'), parameters={ 'banque': {'example_value': '30001'}, 'guichet': {'example_value': '00794'}, 'numero_compte': {'example_value': '12345678901'}, 'cle': {'example_value': '85'}, - } + }, ) def find_tiers_by_rib(self, request, banque, guichet, numero_compte, cle, **kwargs): - criteres = {'banque': banque, 'guichet': guichet, - 'numeroCompte': numero_compte, - 'cleRIB': cle} + criteres = {'banque': banque, 'guichet': guichet, 'numeroCompte': numero_compte, 'cleRIB': cle} # add other params to search criterias criteres.update(kwargs) r = self.search_tiers(criteres) diff --git a/passerelle/apps/atal/migrations/0001_initial.py b/passerelle/apps/atal/migrations/0001_initial.py index 3213a4cd..a8649cf6 100644 --- a/passerelle/apps/atal/migrations/0001_initial.py +++ b/passerelle/apps/atal/migrations/0001_initial.py @@ -17,12 +17,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ATALConnector', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('base_soap_url', models.URLField(help_text='URL of the base SOAP endpoint', max_length=400, verbose_name='Base SOAP endpoint')), - ('users', models.ManyToManyField(blank=True, related_name='_atalconnector_users_+', related_query_name='+', to='base.ApiUser')), + ( + 
'base_soap_url', + models.URLField( + help_text='URL of the base SOAP endpoint', + max_length=400, + verbose_name='Base SOAP endpoint', + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_atalconnector_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'ATAL connector', diff --git a/passerelle/apps/atal/models.py b/passerelle/apps/atal/models.py index 0ce3db7a..5929b661 100644 --- a/passerelle/apps/atal/models.py +++ b/passerelle/apps/atal/models.py @@ -43,17 +43,14 @@ def process_response(demande_number): class ATALConnector(BaseResource): base_soap_url = models.URLField( - max_length=400, verbose_name=_('Base SOAP endpoint'), - help_text=_('URL of the base SOAP endpoint')) + max_length=400, verbose_name=_('Base SOAP endpoint'), help_text=_('URL of the base SOAP endpoint') + ) category = _('Business Process Connectors') class Meta: verbose_name = _('ATAL connector') - DEMANDE_NUMBER_PARAM = { - 'description': _('Demande number'), - 'example_value': 'DIT18050001' - } + DEMANDE_NUMBER_PARAM = {'description': _('Demande number'), 'example_value': 'DIT18050001'} def _soap_call(self, wsdl, method, **kwargs): wsdl_url = urllib.parse.urljoin(self.base_soap_url, '%s?wsdl' % wsdl) @@ -96,34 +93,29 @@ class ATALConnector(BaseResource): return self._xml_ref('VilleAgileService', 'getTypesEquipement', 'types') @endpoint( - perm='can_access', name='insert-action-comment', + perm='can_access', + name='insert-action-comment', post={ 'description': _('Insert action comment'), - 'request_body': { - 'schema': { - 'application/json': schemas.INSERT_ACTION_COMMENT - } - } - } + 'request_body': {'schema': {'application/json': schemas.INSERT_ACTION_COMMENT}}, + }, ) def insert_action_comment(self, request, post_data): demande_number = self._soap_call( - wsdl='DemandeService', method='insertActionComment', + wsdl='DemandeService', + method='insertActionComment', numeroDemande=post_data['numero_demande'], - 
commentaire=post_data['commentaire'] + commentaire=post_data['commentaire'], ) return process_response(demande_number) @endpoint( - perm='can_access', name='insert-demande-complet-by-type', + perm='can_access', + name='insert-demande-complet-by-type', post={ 'description': _('Insert demande complet by type'), - 'request_body': { - 'schema': { - 'application/json': schemas.INSERT_DEMANDE_COMPLET_BY_TYPE - } - } - } + 'request_body': {'schema': {'application/json': schemas.INSERT_DEMANDE_COMPLET_BY_TYPE}}, + }, ) def insert_demande_complet_by_type(self, request, post_data): data = {} @@ -170,39 +162,39 @@ class ATALConnector(BaseResource): if recv in post_data: data[send] = post_data[recv] - demande_number = self._soap_call( - wsdl='DemandeService', method='insertDemandeCompletByType', **data - ) + demande_number = self._soap_call(wsdl='DemandeService', method='insertDemandeCompletByType', **data) return process_response(demande_number) @endpoint( - methods=['get'], perm='can_access', example_pattern='{demande_number}/', - pattern='^(?P\w+)/$', name='retrieve-details-demande', - parameters={ - 'demande_number': DEMANDE_NUMBER_PARAM - } + methods=['get'], + perm='can_access', + example_pattern='{demande_number}/', + pattern='^(?P\w+)/$', + name='retrieve-details-demande', + parameters={'demande_number': DEMANDE_NUMBER_PARAM}, ) def retrieve_details_demande(self, request, demande_number): soap_res = self._soap_call( - wsdl='DemandeService', method='retrieveDetailsDemande', - demandeNumberParam=demande_number) + wsdl='DemandeService', method='retrieveDetailsDemande', demandeNumberParam=demande_number + ) return {'data': helpers.serialize_object(soap_res)} @endpoint( - methods=['get'], perm='can_access', example_pattern='{demande_number}/', - pattern='^(?P\w+)/$', name='retrieve-etat-travaux', - parameters={ - 'demande_number': DEMANDE_NUMBER_PARAM - } + methods=['get'], + perm='can_access', + example_pattern='{demande_number}/', + pattern='^(?P\w+)/$', + 
name='retrieve-etat-travaux', + parameters={'demande_number': DEMANDE_NUMBER_PARAM}, ) def retrieve_etat_travaux(self, request, demande_number): - soap_res = self._soap_call( - wsdl='DemandeService', method='retrieveEtatTravaux', - numero=demande_number) + soap_res = self._soap_call(wsdl='DemandeService', method='retrieveEtatTravaux', numero=demande_number) return {'data': helpers.serialize_object(soap_res)} @endpoint( - methods=['get'], perm='can_access', example_pattern='{demande_number}/', + methods=['get'], + perm='can_access', + example_pattern='{demande_number}/', pattern='^(?P\w+)/$', parameters={ 'demande_number': DEMANDE_NUMBER_PARAM, @@ -210,14 +202,14 @@ class ATALConnector(BaseResource): 'description': _('Full'), 'example_value': 'true', 'type': 'bool', - } - } + }, + }, ) def infos(self, request, demande_number, full=False): demand_details = helpers.serialize_object( self._soap_call( - wsdl='DemandeService', method='retrieveDetailsDemande', - demandeNumberParam=demande_number) + wsdl='DemandeService', method='retrieveDetailsDemande', demandeNumberParam=demande_number + ) ) if not demand_details: raise APIError('Could not get a status') @@ -230,18 +222,12 @@ class ATALConnector(BaseResource): works_comments = [] if responses: for response in responses: - comment = { - 'text': response.get('commentaires'), - 'date': None - } + comment = {'text': response.get('commentaires'), 'date': None} if 'dateReponse' in response: comment['date'] = dateformat.format(response['dateReponse'], DATE_FORMAT) works_comments.append(comment) - works_comment = { - 'text': None, - 'date': None - } + works_comment = {'text': None, 'date': None} if works_comments: works_comment = works_comments[-1] @@ -249,22 +235,17 @@ class ATALConnector(BaseResource): 'status': status, 'works_comment': works_comment, 'demand_details': None, - 'works_comments': [] + 'works_comments': [], } if full: data['demand_details'] = demand_details data['works_comments'] = works_comments if status not in 
('PRISE EN COMPTE', 'ARCHIVEE'): - return { - 'data': data - } + return {'data': data} works_status = helpers.serialize_object( - self._soap_call( - wsdl='DemandeService', method='retrieveEtatTravaux', - numero=demande_number - ) + self._soap_call(wsdl='DemandeService', method='retrieveEtatTravaux', numero=demande_number) ) status = works_status.get('libelle') if not status: @@ -277,20 +258,14 @@ class ATALConnector(BaseResource): if full: data['works_status'] = works_status - return { - 'data': data - } + return {'data': data} @endpoint( perm='can_access', post={ 'description': _('Upload a file'), - 'request_body': { - 'schema': { - 'application/json': schemas.UPLOAD - } - } - } + 'request_body': {'schema': {'application/json': schemas.UPLOAD}}, + }, ) def upload(self, request, post_data): try: @@ -301,23 +276,22 @@ class ATALConnector(BaseResource): data = { 'donneesFichier': content, 'numeroDemande': post_data['numero_demande'], - 'nomFichier': post_data['nom_fichier'] + 'nomFichier': post_data['nom_fichier'], } - self._soap_call( - wsdl='ChargementPiecesJointesService', method='upload', - **data - ) + self._soap_call(wsdl='ChargementPiecesJointesService', method='upload', **data) return {} @endpoint( - methods=['get'], perm='can_access', example_pattern='{demande_number}/', - pattern='^(?P\w+)/$', name='new-comments', + methods=['get'], + perm='can_access', + example_pattern='{demande_number}/', + pattern='^(?P\w+)/$', + name='new-comments', parameters={ 'demande_number': DEMANDE_NUMBER_PARAM, - } + }, ) def new_comments(self, request, demande_number, last_datetime=None): - def issup(datetime1, datetime2): if datetime1.tzinfo is None or datetime2.tzinfo is None: datetime1 = datetime1.replace(tzinfo=None) @@ -331,8 +305,8 @@ class ATALConnector(BaseResource): demand_details = helpers.serialize_object( self._soap_call( - wsdl='DemandeService', method='retrieveDetailsDemande', - demandeNumberParam=demande_number) + wsdl='DemandeService', 
method='retrieveDetailsDemande', demandeNumberParam=demande_number + ) ) if not demand_details: raise APIError('Could not get comments') @@ -340,11 +314,7 @@ class ATALConnector(BaseResource): new_comments, all_comments, last_date = [], [], None responses = (demand_details.get('reponses') or {}).get('Reponse') or [] for response in responses: - comment = { - 'text': response.get('commentaires'), - 'date': None, - 'date_raw': None - } + comment = {'text': response.get('commentaires'), 'date': None, 'date_raw': None} dateobj = None if 'dateReponse' in response: dateobj = response['dateReponse'] @@ -356,10 +326,4 @@ class ATALConnector(BaseResource): if dateobj and issup(dateobj, last_datetime) or last_datetime is None: if comment not in new_comments: new_comments.append(comment) - return { - 'data': { - 'new_comments': new_comments, - 'all_comments': all_comments, - 'last_date': last_date - } - } + return {'data': {'new_comments': new_comments, 'all_comments': all_comments, 'last_date': last_date}} diff --git a/passerelle/apps/atal/schemas.py b/passerelle/apps/atal/schemas.py index d9b6aa1f..77c32bb0 100644 --- a/passerelle/apps/atal/schemas.py +++ b/passerelle/apps/atal/schemas.py @@ -122,25 +122,15 @@ INSERT_DEMANDE_COMPLET_BY_TYPE = { 'demande_commentaire': { 'type': 'string', }, - 'remote_adresse': { - 'type': 'string' - }, - 'demande_mots_cles': { - 'type': 'string' - }, + 'remote_adresse': {'type': 'string'}, + 'demande_mots_cles': {'type': 'string'}, 'code_thematique': { 'type': 'string', }, - 'code_priorite': { - 'type': 'string' - }, - 'demande_thematique': { - 'type': 'string' - }, - 'code_projet': { - 'type': 'string' - } - } + 'code_priorite': {'type': 'string'}, + 'demande_thematique': {'type': 'string'}, + 'code_projet': {'type': 'string'}, + }, } INSERT_ACTION_COMMENT = { @@ -153,8 +143,8 @@ INSERT_ACTION_COMMENT = { }, 'commentaire': { 'type': 'string', - } - } + }, + }, } UPLOAD = { @@ -169,13 +159,13 @@ UPLOAD = { 'content': { 'type': 'string', }, - 
} + }, }, 'numero_demande': { 'type': 'string', }, 'nom_fichier': { 'type': 'string', - } - } + }, + }, } diff --git a/passerelle/apps/atos_genesys/migrations/0001_initial.py b/passerelle/apps/atos_genesys/migrations/0001_initial.py index 28738f10..46afa293 100644 --- a/passerelle/apps/atos_genesys/migrations/0001_initial.py +++ b/passerelle/apps/atos_genesys/migrations/0001_initial.py @@ -19,7 +19,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Link', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name_id', models.CharField(max_length=256, verbose_name='NameID')), ('id_per', models.CharField(max_length=64, verbose_name='ID Per')), ('created', models.DateTimeField(auto_now_add=True, verbose_name='Creation date')), @@ -32,20 +35,65 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Resource', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), - ('log_level', models.CharField(choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL'), (b'FATAL', b'FATAL')], default=b'INFO', max_length=10, verbose_name='Log Level')), - ('basic_auth_username', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication username')), - ('basic_auth_password', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication password')), - ('client_certificate', 
models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate')), - ('trusted_certificate_authorities', models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS trusted CAs')), + ( + 'log_level', + models.CharField( + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + (b'FATAL', b'FATAL'), + ], + default=b'INFO', + max_length=10, + verbose_name='Log Level', + ), + ), + ( + 'basic_auth_username', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication username' + ), + ), + ( + 'basic_auth_password', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication password' + ), + ), + ( + 'client_certificate', + models.FileField( + blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate' + ), + ), + ( + 'trusted_certificate_authorities', + models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS trusted CAs'), + ), ('verify_cert', models.BooleanField(default=True, verbose_name='TLS verify certificates')), - ('http_proxy', models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy')), + ( + 'http_proxy', + models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'), + ), ('webservice_base_url', models.URLField(verbose_name='Webservice Base URL')), ('cod_rgp', models.CharField(default=b'RGP_PUB', max_length=64, verbose_name='Code RGP')), - ('users', models.ManyToManyField(blank=True, related_name='_link_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_link_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'ATOS Genesys', diff --git a/passerelle/apps/atos_genesys/migrations/0003_auto_20200504_1402.py b/passerelle/apps/atos_genesys/migrations/0003_auto_20200504_1402.py 
index 82e1766a..e1ef6a7e 100644 --- a/passerelle/apps/atos_genesys/migrations/0003_auto_20200504_1402.py +++ b/passerelle/apps/atos_genesys/migrations/0003_auto_20200504_1402.py @@ -15,7 +15,9 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='resource', name='client_certificate', - field=models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS client certificate'), + field=models.FileField( + blank=True, null=True, upload_to='', verbose_name='TLS client certificate' + ), ), migrations.AlterField( model_name='resource', diff --git a/passerelle/apps/atos_genesys/models.py b/passerelle/apps/atos_genesys/models.py index 4f1ab273..49990d7d 100644 --- a/passerelle/apps/atos_genesys/models.py +++ b/passerelle/apps/atos_genesys/models.py @@ -93,7 +93,7 @@ class Resource(BaseResource, HTTPResource): continue categories[xmlutils.text_content(code)] = { 'label': xmlutils.text_content(label), - 'codifications': [] + 'codifications': [], } for codification in root.findall('CODIFICATIONS/CODIFICATIONS_ROW'): code = codification.find('CD_CODIF') @@ -107,11 +107,15 @@ class Resource(BaseResource, HTTPResource): if category_cod not in categories: self.logger.warning('unknown category: %s', category_cod) continue - categories[category_cod]['codifications'].append({ - 'code': xmlutils.text_content(code), - 'label': xmlutils.text_content(label), - 'enabled': xmlutils.text_content(in_val).strip().lower() == 'o' if in_val is not None else True, - }) + categories[category_cod]['codifications'].append( + { + 'code': xmlutils.text_content(code), + 'label': xmlutils.text_content(label), + 'enabled': xmlutils.text_content(in_val).strip().lower() == 'o' + if in_val is not None + else True, + } + ) return categories def get_codifications(self): @@ -119,40 +123,43 @@ class Resource(BaseResource, HTTPResource): function=self.call_select_codifications, row=self, key_prefix='atos-genesys-codifications', - logger=self.logger) + logger=self.logger, + ) 
return cache() - @endpoint(name='codifications', - description=_('List of codifications categories')) + @endpoint(name='codifications', description=_('List of codifications categories')) def codifications(self, request): codifications = self.get_codifications() items = [] for code, category in codifications.items(): - items.append({ - 'id': code, - 'label': category['label'], - }) + items.append( + { + 'id': code, + 'label': category['label'], + } + ) items.sort(key=lambda c: c['label']) return {'data': items} - @endpoint(name='codifications', - pattern=r'^(?P[\w-]+)/$', - example_pattern='{category}/', - description=_('List of codifications'), - parameters={ - 'category': { - 'description': _('Category of codification'), - 'example_value': u'MOT_APA', - } - }) + @endpoint( + name='codifications', + pattern=r'^(?P[\w-]+)/$', + example_pattern='{category}/', + description=_('List of codifications'), + parameters={ + 'category': { + 'description': _('Category of codification'), + 'example_value': u'MOT_APA', + } + }, + ) def codifications_list(self, request, category): codifications = self.get_codifications().get(category, {}).get('codifications', []) - items = [{ - 'id': codification['code'], - 'text': codification['label'] - } for codification in codifications] + items = [ + {'id': codification['code'], 'text': codification['label']} for codification in codifications + ] return {'data': items} def check_status(self): @@ -163,11 +170,14 @@ class Resource(BaseResource, HTTPResource): return urlparse.urljoin(self.base_url, 'WSUsagerPublik/services/PublikService/selectAppairage') def call_select_appairage(self, login, password, email): - row = self.xml_request(self.select_appairage_url, params={ - 'login': login, - 'pwd': password, - 'email': email, - }) + row = self.xml_request( + self.select_appairage_url, + params={ + 'login': login, + 'pwd': password, + 'email': email, + }, + ) row_d = xmlutils.to_json(row) id_per = row_d.get('ID_PER', '').strip() code = 
row_d.get('CD_RET', '').strip() @@ -175,72 +185,70 @@ class Resource(BaseResource, HTTPResource): error = None if code not in ['1', '2', '3', '4', '5', '6']: - error = 'invalid CD_RET: %s' % code, + error = ('invalid CD_RET: %s' % code,) if code in ['2', '3', '5'] and not id_per: error = 'missing ID_PER' if error: raise APIError(error, data={'response': repr(ET.tostring(row))}) return code, label, id_per - @endpoint(name='link', - methods=['post'], - description=_('Create link with an extranet account'), - perm='can_access', - parameters={ - 'NameID':{ - 'description': _('Publik NameID'), - 'example_value': 'xyz24d934', - }, - 'email': { - 'description': _('Publik known email'), - 'example_value': 'john.doe@example.com', - }, - 'login': { - 'description': _('ATOS Genesys extranet login'), - 'example_value': '1234', - }, - 'password': { - 'description': _('ATOS Genesys extranet password'), - 'example_value': 'password', - } - }) + @endpoint( + name='link', + methods=['post'], + description=_('Create link with an extranet account'), + perm='can_access', + parameters={ + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + }, + 'email': { + 'description': _('Publik known email'), + 'example_value': 'john.doe@example.com', + }, + 'login': { + 'description': _('ATOS Genesys extranet login'), + 'example_value': '1234', + }, + 'password': { + 'description': _('ATOS Genesys extranet password'), + 'example_value': 'password', + }, + }, + ) def link(self, request, NameID, email, login, password): code, label, id_per = self.call_select_appairage(login, password, email) if code in ['2', '3', '5']: - link, created = Link.objects.get_or_create( - resource=self, - name_id=NameID, - id_per=id_per) + link, created = Link.objects.get_or_create(resource=self, name_id=NameID, id_per=id_per) return {'link_id': link.pk, 'new': created, 'code': code, 'label': label} elif code == '6': raise APIError('unknown-login', data={'code': code, 'label': label}) elif 
code in ['4', '1']: raise APIError('invalid-password', data={'code': code, 'label': label}) - @endpoint(name='unlink', - methods=['post'], - description=_('Delete link with an extranet account'), - perm='can_access', - parameters={ - 'NameID':{ - 'description': _('Publik NameID'), - 'example_value': 'xyz24d934', - }, - 'link_id': { - 'description': _('Identifier of the link'), - 'example_value': '1', - }, - }) + @endpoint( + name='unlink', + methods=['post'], + description=_('Delete link with an extranet account'), + perm='can_access', + parameters={ + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + }, + 'link_id': { + 'description': _('Identifier of the link'), + 'example_value': '1', + }, + }, + ) def unlink(self, request, NameID, link_id): try: link_id = int(link_id.strip()) except ValueError: raise APIError('invalid link_id') - qs = Link.objects.filter( - resource=self, - name_id=NameID, - pk=link_id) + qs = Link.objects.filter(resource=self, name_id=NameID, pk=link_id) count = qs.count() qs.delete() return {'deleted': count} @@ -250,10 +258,13 @@ class Resource(BaseResource, HTTPResource): return urlparse.urljoin(self.base_url, 'WSUsagerPublik/services/PublikService/selectUsager') def call_select_usager(self, id_per): - row = self.xml_request(self.select_usager_url, params={ - 'idPer': id_per, - 'codRgp': self.cod_rgp, - }) + row = self.xml_request( + self.select_usager_url, + params={ + 'idPer': id_per, + 'codRgp': self.cod_rgp, + }, + ) return self._select_usager_row_to_json(row) def _select_usager_row_to_json(self, row): @@ -275,19 +286,19 @@ class Resource(BaseResource, HTTPResource): identification['CIVILITE'] = {'M': u'Monsieur', 'F': u'Madame'}.get(sexe, '') return d - @endpoint(name='dossiers', - description=_('Get datas for all links'), - perm='can_access', - parameters={ - 'NameID':{ - 'description': _('Publik NameID'), - 'example_value': 'xyz24d934', - }, - }) + @endpoint( + name='dossiers', + description=_('Get 
datas for all links'), + perm='can_access', + parameters={ + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + }, + }, + ) def dossiers(self, request, NameID, link_id=None): - qs = Link.objects.filter( - resource=self, - name_id=NameID) + qs = Link.objects.filter(resource=self, name_id=NameID) if link_id: try: link_id = int(link_id) @@ -300,7 +311,8 @@ class Resource(BaseResource, HTTPResource): function=self.call_select_usager, row=link, key_prefix='atos-genesys-usager', - logger=self.logger) + logger=self.logger, + ) dossier = cache(link.id_per) # build text as "id_per - prenom - no text_parts = [str(link.id_per), '-'] @@ -312,12 +324,14 @@ class Resource(BaseResource, HTTPResource): text_parts.append(prenom.title()) if nom: text_parts.append(nom.upper()) - data.append({ - 'id': str(link.id), - 'text': u' '.join(text_parts), - 'id_per': link.id_per, - 'dossier': dossier, - }) + data.append( + { + 'id': str(link.id), + 'text': u' '.join(text_parts), + 'id_per': link.id_per, + 'dossier': dossier, + } + ) if link_id: return {'data': data[0] if data else None} return {'data': data} @@ -327,10 +341,13 @@ class Resource(BaseResource, HTTPResource): return urlparse.urljoin(self.base_url, 'WSUsagerPublik/services/PublikService/selectUsagerByRef') def call_select_usager_by_ref(self, ref_per): - row = self.xml_request(self.select_usager_by_ref_url, params={ - 'refPer': ref_per, - 'codRgp': self.cod_rgp, - }) + row = self.xml_request( + self.select_usager_by_ref_url, + params={ + 'refPer': ref_per, + 'codRgp': self.cod_rgp, + }, + ) return self._select_usager_row_to_json(row) @property @@ -338,31 +355,36 @@ class Resource(BaseResource, HTTPResource): return urlparse.urljoin(self.base_url, 'WSUsagerPublik/services/PublikService/chercheBeneficiaire') def call_cherche_beneficiaire(self, prenom, nom, dob): - rows = self.xml_request_multiple(self.cherche_beneficiaire_url, params={ - 'nmPer': nom, - 'prPer': prenom, - 'dtNaissance': 
dob.strftime('%d/%m/%Y'), - }) + rows = self.xml_request_multiple( + self.cherche_beneficiaire_url, + params={ + 'nmPer': nom, + 'prPer': prenom, + 'dtNaissance': dob.strftime('%d/%m/%Y'), + }, + ) beneficiaires = [xmlutils.to_json(row) for row in rows] return beneficiaires - @endpoint(name='search', - description=_('Search for beneficiaries'), - perm='can_access', - parameters={ - 'first_name': { - 'description': _('Beneficiary first name'), - 'example_value': 'John', - }, - 'last_name': { - 'description': _('Beneficiary last name'), - 'example_value': 'Doe', - }, - 'date_of_birth': { - 'description': _('Beneficiary date of birth'), - 'example_value': '1987-10-23', - } - }) + @endpoint( + name='search', + description=_('Search for beneficiaries'), + perm='can_access', + parameters={ + 'first_name': { + 'description': _('Beneficiary first name'), + 'example_value': 'John', + }, + 'last_name': { + 'description': _('Beneficiary last name'), + 'example_value': 'Doe', + }, + 'date_of_birth': { + 'description': _('Beneficiary date of birth'), + 'example_value': '1987-10-23', + }, + }, + ) def search(self, request, first_name, last_name, date_of_birth, NameID=None, commune_naissance=None): try: date_of_birth = datetime.datetime.strptime(date_of_birth, '%Y-%m-%d').date() @@ -373,10 +395,7 @@ class Resource(BaseResource, HTTPResource): if commune_naissance: # convert commune_naissance to ASCII commune_naissance = to_ascii(commune_naissance).lower() - beneficiaires = self.call_cherche_beneficiaire( - prenom=first_name, - nom=last_name, - dob=date_of_birth) + beneficiaires = self.call_cherche_beneficiaire(prenom=first_name, nom=last_name, dob=date_of_birth) data = [] dossiers = [] # get dossiers of found beneficiaries @@ -410,8 +429,12 @@ class Resource(BaseResource, HTTPResource): if commune_naissance: cmu_nais = to_ascii(identification.get('CMU_NAIS', '')).lower() if cmu_nais and commune_naissance != cmu_nais: - self.logger.debug(u'id_per %s: CMU_NAIS(%s) does not match 
commune_naissance(%s)', - id_per, cmu_nais, commune_naissance) + self.logger.debug( + u'id_per %s: CMU_NAIS(%s) does not match commune_naissance(%s)', + id_per, + cmu_nais, + commune_naissance, + ) continue dossiers.append(dossier) @@ -431,38 +454,41 @@ class Resource(BaseResource, HTTPResource): tel2 = ''.join(c for c in identification.get('TEL_FIXE', '') if is_number(c)) email = identification.get('MAIL', '').strip() if tel1 and tel1[:2] in ('06', '07'): - data.append({ - 'id': 'tel1', - 'text': 'par SMS vers ' + tel1[:2] + '*****' + tel1[-3:], - 'phone': tel1, - - 'id_per': id_per, - 'nom': nom, - 'prenom': prenom, - 'nom_naissance': nom_naissance, - }) + data.append( + { + 'id': 'tel1', + 'text': 'par SMS vers ' + tel1[:2] + '*****' + tel1[-3:], + 'phone': tel1, + 'id_per': id_per, + 'nom': nom, + 'prenom': prenom, + 'nom_naissance': nom_naissance, + } + ) if tel2 and tel2[:2] in ('06', '07'): - data.append({ - 'id': 'tel2', - 'text': 'par SMS vers ' + tel2[:2] + '*****' + tel2[-3:], - 'phone': tel2, - - 'id_per': id_per, - 'nom': nom, - 'prenom': prenom, - 'nom_naissance': nom_naissance, - }) + data.append( + { + 'id': 'tel2', + 'text': 'par SMS vers ' + tel2[:2] + '*****' + tel2[-3:], + 'phone': tel2, + 'id_per': id_per, + 'nom': nom, + 'prenom': prenom, + 'nom_naissance': nom_naissance, + } + ) if email: - data.append({ - 'id': 'email1', - 'text': 'par courriel vers ' + email[:2] + '***@***' + email[-3:], - 'email': email, - - 'id_per': id_per, - 'nom': nom, - 'prenom': prenom, - 'nom_naissance': nom_naissance, - }) + data.append( + { + 'id': 'email1', + 'text': 'par courriel vers ' + email[:2] + '***@***' + email[-3:], + 'email': email, + 'id_per': id_per, + 'nom': nom, + 'prenom': prenom, + 'nom_naissance': nom_naissance, + } + ) if len(data) == 0: self.logger.debug('id_per %s: no contact information, ignored', id_per) raise APIError('no-contacts') @@ -476,50 +502,39 @@ class Resource(BaseResource, HTTPResource): 'link_id': link and link.id, } - 
@endpoint(name='link-by-id-per', - methods=['post'], - description=_('Create link with an extranet account'), - perm='can_access', - parameters={ - 'NameID': { - 'description': _('Publik NameID'), - 'example_value': 'xyz24d934', - }, - 'id_per': { - 'description': _('ATOS Genesys ID_PER'), - 'example_value': '767676', - } - }) + @endpoint( + name='link-by-id-per', + methods=['post'], + description=_('Create link with an extranet account'), + perm='can_access', + parameters={ + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + }, + 'id_per': { + 'description': _('ATOS Genesys ID_PER'), + 'example_value': '767676', + }, + }, + ) def link_by_id_per(self, request, NameID, id_per): dossier = self.call_select_usager(id_per) - link, created = Link.objects.get_or_create( - resource=self, - name_id=NameID, - id_per=id_per) + link, created = Link.objects.get_or_create(resource=self, name_id=NameID, id_per=id_per) return {'link_id': link.pk, 'new': created} class Link(models.Model): - resource = models.ForeignKey( - Resource, - on_delete=models.CASCADE) - name_id = models.CharField( - verbose_name=_('NameID'), - blank=False, - max_length=256) - id_per = models.CharField( - verbose_name=_('ID Per'), - blank=False, - max_length=64) - created = models.DateTimeField( - verbose_name=_('Creation date'), - auto_now_add=True) - extra = JSONField( - verbose_name=_('Anything'), - null=True) + resource = models.ForeignKey(Resource, on_delete=models.CASCADE) + name_id = models.CharField(verbose_name=_('NameID'), blank=False, max_length=256) + id_per = models.CharField(verbose_name=_('ID Per'), blank=False, max_length=64) + created = models.DateTimeField(verbose_name=_('Creation date'), auto_now_add=True) + extra = JSONField(verbose_name=_('Anything'), null=True) class Meta: unique_together = ( - 'resource', 'name_id', 'id_per', + 'resource', + 'name_id', + 'id_per', ) ordering = ['created'] diff --git a/passerelle/apps/atos_genesys/utils.py 
b/passerelle/apps/atos_genesys/utils.py index c9acc184..c009888e 100644 --- a/passerelle/apps/atos_genesys/utils.py +++ b/passerelle/apps/atos_genesys/utils.py @@ -25,11 +25,12 @@ def row_lock(row): class RowLockedCache(object): - '''Cache return value of a function, always return the cached value for - performance but if the cache is stale update it asynchronously using - a thread, prevent multiple update using row locks on database models and - an update cache key. - ''' + """Cache return value of a function, always return the cached value for + performance but if the cache is stale update it asynchronously using + a thread, prevent multiple update using row locks on database models and + an update cache key. + """ + def __init__(self, function, logger=None, row=None, duration=DEFAULT_DURATION, key_prefix=None): self.function = function self.row = row diff --git a/passerelle/apps/base_adresse/migrations/0001_initial.py b/passerelle/apps/base_adresse/migrations/0001_initial.py index 0a5afc0d..997a3277 100644 --- a/passerelle/apps/base_adresse/migrations/0001_initial.py +++ b/passerelle/apps/base_adresse/migrations/0001_initial.py @@ -14,12 +14,28 @@ class Migration(migrations.Migration): migrations.CreateModel( name='BaseAddresse', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('service_url', models.CharField(help_text='Base Adresse Web Service URL', max_length=128, verbose_name='Service URL')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_baseaddresse_users_+', related_query_name='+', blank=True)), + ( + 'service_url', + models.CharField( + help_text='Base Adresse Web 
Service URL', max_length=128, verbose_name='Service URL' + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_baseaddresse_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Base Adresse Web Service', diff --git a/passerelle/apps/base_adresse/migrations/0002_auto_20150705_0330.py b/passerelle/apps/base_adresse/migrations/0002_auto_20150705_0330.py index be1448c4..9626bd98 100644 --- a/passerelle/apps/base_adresse/migrations/0002_auto_20150705_0330.py +++ b/passerelle/apps/base_adresse/migrations/0002_auto_20150705_0330.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='baseaddresse', name='service_url', - field=models.CharField(default=b'https://api-adresse.data.gouv.fr/', help_text='Base Adresse Web Service URL', max_length=128, verbose_name='Service URL'), + field=models.CharField( + default=b'https://api-adresse.data.gouv.fr/', + help_text='Base Adresse Web Service URL', + max_length=128, + verbose_name='Service URL', + ), preserve_default=True, ), ] diff --git a/passerelle/apps/base_adresse/migrations/0003_baseaddresse_log_level.py b/passerelle/apps/base_adresse/migrations/0003_baseaddresse_log_level.py index 3681ded5..753baea6 100644 --- a/passerelle/apps/base_adresse/migrations/0003_baseaddresse_log_level.py +++ b/passerelle/apps/base_adresse/migrations/0003_baseaddresse_log_level.py @@ -14,7 +14,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='baseaddresse', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/base_adresse/migrations/0004_auto_20160316_0910.py 
b/passerelle/apps/base_adresse/migrations/0004_auto_20160316_0910.py index 0dcf599e..37f4b239 100644 --- a/passerelle/apps/base_adresse/migrations/0004_auto_20160316_0910.py +++ b/passerelle/apps/base_adresse/migrations/0004_auto_20160316_0910.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='baseaddresse', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/base_adresse/migrations/0005_auto_20160407_0456.py b/passerelle/apps/base_adresse/migrations/0005_auto_20160407_0456.py index 68e953c6..52410033 100644 --- a/passerelle/apps/base_adresse/migrations/0005_auto_20160407_0456.py +++ b/passerelle/apps/base_adresse/migrations/0005_auto_20160407_0456.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='baseaddresse', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/base_adresse/migrations/0006_rename_model.py b/passerelle/apps/base_adresse/migrations/0006_rename_model.py index 3487ef1f..3e097d37 100644 --- a/passerelle/apps/base_adresse/migrations/0006_rename_model.py +++ b/passerelle/apps/base_adresse/migrations/0006_rename_model.py @@ -10,6 +10,4 @@ 
class Migration(migrations.Migration): ('base_adresse', '0005_auto_20160407_0456'), ] - operations = [ - migrations.RenameModel('BaseAddresse', 'BaseAdresse') - ] + operations = [migrations.RenameModel('BaseAddresse', 'BaseAdresse')] diff --git a/passerelle/apps/base_adresse/migrations/0007_auto_20160729_1540.py b/passerelle/apps/base_adresse/migrations/0007_auto_20160729_1540.py index 2b65c07a..f0b6215d 100644 --- a/passerelle/apps/base_adresse/migrations/0007_auto_20160729_1540.py +++ b/passerelle/apps/base_adresse/migrations/0007_auto_20160729_1540.py @@ -14,7 +14,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='StreetModel', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('city', models.CharField(max_length=100, verbose_name='City')), ('name', models.CharField(max_length=150, verbose_name='Street name')), ('zipcode', models.CharField(max_length=5, verbose_name='Postal code')), @@ -26,7 +29,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='UpdateStreetModel', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('zipcode', models.CharField(max_length=5, verbose_name='Postal code')), ('start_time', models.DateTimeField(null=True, verbose_name='Start of update')), ('end_time', models.DateTimeField(null=True, verbose_name='End of update')), @@ -35,6 +41,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='baseadresse', name='zipcode', - field=models.CharField(max_length=5, verbose_name='Postal codes to get streets, separated with commas', blank=True), + field=models.CharField( + max_length=5, verbose_name='Postal codes to get streets, separated with commas', 
blank=True + ), ), ] diff --git a/passerelle/apps/base_adresse/migrations/0014_auto_20190207_0456.py b/passerelle/apps/base_adresse/migrations/0014_auto_20190207_0456.py index 716a9bd2..605ce23b 100644 --- a/passerelle/apps/base_adresse/migrations/0014_auto_20190207_0456.py +++ b/passerelle/apps/base_adresse/migrations/0014_auto_20190207_0456.py @@ -15,6 +15,10 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='baseadresse', name='zipcode', - field=models.CharField(blank=True, max_length=600, verbose_name='Postal codes or county number to get streets, separated with commas'), + field=models.CharField( + blank=True, + max_length=600, + verbose_name='Postal codes or county number to get streets, separated with commas', + ), ), ] diff --git a/passerelle/apps/base_adresse/migrations/0015_auto_20191206_1244.py b/passerelle/apps/base_adresse/migrations/0015_auto_20191206_1244.py index 8061b976..49ff6895 100644 --- a/passerelle/apps/base_adresse/migrations/0015_auto_20191206_1244.py +++ b/passerelle/apps/base_adresse/migrations/0015_auto_20191206_1244.py @@ -17,9 +17,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CityModel', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name', models.CharField(max_length=150, verbose_name='City name')), - ('unaccent_name', models.CharField(max_length=150, null=True, verbose_name='City name ascii char')), + ( + 'unaccent_name', + models.CharField(max_length=150, null=True, verbose_name='City name ascii char'), + ), ('code', models.CharField(max_length=5, verbose_name='INSEE code')), ('zipcode', models.CharField(max_length=5, verbose_name='Postal code')), ('population', models.PositiveIntegerField(verbose_name='Population')), @@ -33,9 +39,15 @@ class Migration(migrations.Migration): migrations.CreateModel( 
name='DepartmentModel', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name', models.CharField(max_length=100, verbose_name='Department name')), - ('unaccent_name', models.CharField(max_length=150, null=True, verbose_name='Department name ascii char')), + ( + 'unaccent_name', + models.CharField(max_length=150, null=True, verbose_name='Department name ascii char'), + ), ('code', models.CharField(max_length=3, unique=True, verbose_name='Department code')), ('last_update', models.DateTimeField(auto_now=True, null=True, verbose_name='Last update')), ], @@ -47,9 +59,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name='RegionModel', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name', models.CharField(max_length=150, verbose_name='Region name')), - ('unaccent_name', models.CharField(max_length=150, null=True, verbose_name='Region name ascii char')), + ( + 'unaccent_name', + models.CharField(max_length=150, null=True, verbose_name='Region name ascii char'), + ), ('code', models.CharField(max_length=2, unique=True, verbose_name='Region code')), ('last_update', models.DateTimeField(auto_now=True, null=True, verbose_name='Last update')), ], @@ -61,12 +79,21 @@ class Migration(migrations.Migration): migrations.AddField( model_name='baseadresse', name='api_geo_url', - field=models.CharField(default=b'https://geo.api.gouv.fr/', help_text='Base Adresse API Geo URL', max_length=128, verbose_name='API Geo URL'), + field=models.CharField( + default=b'https://geo.api.gouv.fr/', + help_text='Base Adresse API Geo URL', + max_length=128, + verbose_name='API Geo URL', + ), ), migrations.AlterField( 
model_name='baseadresse', name='zipcode', - field=models.CharField(blank=True, max_length=600, verbose_name='Postal codes or department number to get streets, separated with commas'), + field=models.CharField( + blank=True, + max_length=600, + verbose_name='Postal codes or department number to get streets, separated with commas', + ), ), migrations.AlterField( model_name='streetmodel', @@ -76,17 +103,29 @@ class Migration(migrations.Migration): migrations.AddField( model_name='departmentmodel', name='region', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base_adresse.RegionModel'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='base_adresse.RegionModel' + ), ), migrations.AddField( model_name='citymodel', name='department', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='base_adresse.DepartmentModel'), + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to='base_adresse.DepartmentModel', + ), ), migrations.AddField( model_name='citymodel', name='region', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='base_adresse.RegionModel'), + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to='base_adresse.RegionModel', + ), ), migrations.AlterUniqueTogether( name='citymodel', diff --git a/passerelle/apps/base_adresse/migrations/0016_auto_20200130_1604.py b/passerelle/apps/base_adresse/migrations/0016_auto_20200130_1604.py index edb9d8f1..f08d3131 100644 --- a/passerelle/apps/base_adresse/migrations/0016_auto_20200130_1604.py +++ b/passerelle/apps/base_adresse/migrations/0016_auto_20200130_1604.py @@ -16,7 +16,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='AddressCacheModel', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 
'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('api_id', models.CharField(max_length=30, unique=True)), ('data', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), ('timestamp', models.DateTimeField(auto_now=True)), @@ -25,11 +28,21 @@ class Migration(migrations.Migration): migrations.AddField( model_name='baseadresse', name='latitude', - field=models.FloatField(blank=True, help_text='Geographic priority for /addresses/ endpoint.', null=True, verbose_name='Latitude'), + field=models.FloatField( + blank=True, + help_text='Geographic priority for /addresses/ endpoint.', + null=True, + verbose_name='Latitude', + ), ), migrations.AddField( model_name='baseadresse', name='longitude', - field=models.FloatField(blank=True, help_text='Geographic priority for /addresses/ endpoint.', null=True, verbose_name='Longitude'), + field=models.FloatField( + blank=True, + help_text='Geographic priority for /addresses/ endpoint.', + null=True, + verbose_name='Longitude', + ), ), ] diff --git a/passerelle/apps/base_adresse/migrations/0017_auto_20200504_1402.py b/passerelle/apps/base_adresse/migrations/0017_auto_20200504_1402.py index 696417a9..f917b808 100644 --- a/passerelle/apps/base_adresse/migrations/0017_auto_20200504_1402.py +++ b/passerelle/apps/base_adresse/migrations/0017_auto_20200504_1402.py @@ -21,11 +21,21 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='baseadresse', name='api_geo_url', - field=models.CharField(default='https://geo.api.gouv.fr/', help_text='Base Adresse API Geo URL', max_length=128, verbose_name='API Geo URL'), + field=models.CharField( + default='https://geo.api.gouv.fr/', + help_text='Base Adresse API Geo URL', + max_length=128, + verbose_name='API Geo URL', + ), ), migrations.AlterField( model_name='baseadresse', name='service_url', - field=models.CharField(default='https://api-adresse.data.gouv.fr/', help_text='Base Adresse Web Service URL', 
max_length=128, verbose_name='Service URL'), + field=models.CharField( + default='https://api-adresse.data.gouv.fr/', + help_text='Base Adresse Web Service URL', + max_length=128, + verbose_name='Service URL', + ), ), ] diff --git a/passerelle/apps/base_adresse/models.py b/passerelle/apps/base_adresse/models.py index 2733e3b3..d79a9136 100644 --- a/passerelle/apps/base_adresse/models.py +++ b/passerelle/apps/base_adresse/models.py @@ -22,16 +22,20 @@ from passerelle.utils.jsonresponse import APIError class BaseAdresse(BaseResource): service_url = models.CharField( - max_length=128, blank=False, + max_length=128, + blank=False, default='https://api-adresse.data.gouv.fr/', verbose_name=_('Service URL'), - help_text=_('Base Adresse Web Service URL')) + help_text=_('Base Adresse Web Service URL'), + ) api_geo_url = models.CharField( - max_length=128, blank=False, + max_length=128, + blank=False, default='https://geo.api.gouv.fr/', verbose_name=_('API Geo URL'), - help_text=_('Base Adresse API Geo URL')) + help_text=_('Base Adresse API Geo URL'), + ) category = _('Geographic information system') @@ -46,15 +50,18 @@ class BaseAdresse(BaseResource): zipcode = models.CharField( max_length=600, blank=True, - verbose_name=_('Postal codes or department number to get streets, separated with commas')) + verbose_name=_('Postal codes or department number to get streets, separated with commas'), + ) latitude = models.FloatField( - null=True, blank=True, + null=True, + blank=True, verbose_name=_('Latitude'), help_text=_('Geographic priority for /addresses/ endpoint.'), ) longitude = models.FloatField( - null=True, blank=True, + null=True, + blank=True, verbose_name=_('Longitude'), help_text=_('Geographic priority for /addresses/ endpoint.'), ) @@ -78,29 +85,38 @@ class BaseAdresse(BaseResource): elif prop == 'name': house_number = data['properties'].get('housenumber') if house_number and value.startswith(house_number): - value = value[len(house_number):].strip() + value = 
value[len(house_number) :].strip() result['address']['road'] = value elif prop == 'id': result['id'] = value return result - @endpoint(pattern='(?P.+)?$', - description=_('Addresses list'), - parameters={ - 'id': {'description': _('Address identifier')}, - 'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'}, - 'page_limit': {'description': _('Maximum number of results to return. Must be ' - 'lower than 20.')}, - 'zipcode': {'description': _('Zipcode'), 'example_value': '75014'}, - 'citycode': {'description': _('INSEE City code')}, - 'lat': {'description': _('Prioritize results according to coordinates. "lon" ' - 'parameter must also be present.')}, - 'lon': {'description': _('Prioritize results according to coordinates. "lat" ' - 'parameter must also be present.')}, - }) - def addresses(self, request, id=None, q=None, - zipcode='', citycode=None, - lat=None, lon=None, page_limit=5): + @endpoint( + pattern='(?P.+)?$', + description=_('Addresses list'), + parameters={ + 'id': {'description': _('Address identifier')}, + 'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'}, + 'page_limit': { + 'description': _('Maximum number of results to return. Must be ' 'lower than 20.') + }, + 'zipcode': {'description': _('Zipcode'), 'example_value': '75014'}, + 'citycode': {'description': _('INSEE City code')}, + 'lat': { + 'description': _( + 'Prioritize results according to coordinates. "lon" ' 'parameter must also be present.' + ) + }, + 'lon': { + 'description': _( + 'Prioritize results according to coordinates. "lat" ' 'parameter must also be present.' 
+ ) + }, + }, + ) + def addresses( + self, request, id=None, q=None, zipcode='', citycode=None, lat=None, lon=None, page_limit=5 + ): if id is not None: try: address = AddressCacheModel.objects.get(api_id=id) @@ -145,34 +161,47 @@ class BaseAdresse(BaseResource): data = self.format_address_data(feature) result.append(data) address, created = AddressCacheModel.objects.get_or_create( - api_id=data['id'], defaults={'data': data}) + api_id=data['id'], defaults={'data': data} + ) if not created: address.update_timestamp() return {'data': result} - @endpoint(pattern='(?P.+)?$', description=_('Geocoding (Nominatim API)'), - parameters={ - 'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'}, - 'zipcode': {'description': _('Zipcode')}, - 'citycode': {'description': _('INSEE City code')}, - 'lat': {'description': _('Prioritize results according to coordinates. "lat" ' - 'parameter must be present.')}, - 'lon': {'description': _('Prioritize results according to coordinates. "lon" ' - 'parameter must be present.')}, - }) + @endpoint( + pattern='(?P.+)?$', + description=_('Geocoding (Nominatim API)'), + parameters={ + 'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'}, + 'zipcode': {'description': _('Zipcode')}, + 'citycode': {'description': _('INSEE City code')}, + 'lat': { + 'description': _( + 'Prioritize results according to coordinates. "lat" ' 'parameter must be present.' + ) + }, + 'lon': { + 'description': _( + 'Prioritize results according to coordinates. "lon" ' 'parameter must be present.' 
+ ) + }, + }, + ) def search(self, request, q, zipcode='', citycode=None, lat=None, lon=None, **kwargs): if kwargs.get('format', 'json') != 'json': raise NotImplementedError() - result = self.addresses(request, q=q, zipcode=zipcode, citycode=citycode, - lat=lat, lon=lon, page_limit=1) + result = self.addresses( + request, q=q, zipcode=zipcode, citycode=citycode, lat=lat, lon=lon, page_limit=1 + ) return result['data'] - @endpoint(description=_('Reverse geocoding'), - parameters={ - 'lat': {'description': _('Latitude'), 'example_value': 48.833708}, - 'lon': {'description': _('Longitude'), 'example_value': 2.323349}, - }) + @endpoint( + description=_('Reverse geocoding'), + parameters={ + 'lat': {'description': _('Latitude'), 'example_value': 48.833708}, + 'lon': {'description': _('Longitude'), 'example_value': 2.323349}, + }, + ) def reverse(self, request, lat, lon, **kwargs): if kwargs.get('format', 'json') != 'json': raise NotImplementedError() @@ -196,18 +225,18 @@ class BaseAdresse(BaseResource): break return result - @endpoint(description=_('Streets from zipcode'), - parameters={ - 'id': {'description': _('Street identifier')}, - 'q': {'description': _("Street name")}, - 'zipcode': {'description': _('Zipcode')}, - 'citycode': {'description': _('INSEE City code')}, - 'page_limit': {'description': _('Maximum number of results to return'), - 'example_value': 30}, - 'distinct': {'description': _('Remove duplicate streets')}, - }) - def streets(self, request, zipcode=None, citycode=None, - q=None, id=None, distinct=True, page_limit=None): + @endpoint( + description=_('Streets from zipcode'), + parameters={ + 'id': {'description': _('Street identifier')}, + 'q': {'description': _("Street name")}, + 'zipcode': {'description': _('Zipcode')}, + 'citycode': {'description': _('INSEE City code')}, + 'page_limit': {'description': _('Maximum number of results to return'), 'example_value': 30}, + 'distinct': {'description': _('Remove duplicate streets')}, + }, + ) + def 
streets(self, request, zipcode=None, citycode=None, q=None, id=None, distinct=True, page_limit=None): result = [] if id is not None: try: @@ -234,29 +263,38 @@ class BaseAdresse(BaseResource): streets = streets[:page_limit] for street in streets: - result.append({'id': str(street.id), - 'text': street.name, - 'type': street.type, - 'city': street.city, - 'citycode': street.citycode, - 'zipcode': street.zipcode}) + result.append( + { + 'id': str(street.id), + 'text': street.name, + 'type': street.type, + 'city': street.city, + 'citycode': street.citycode, + 'zipcode': street.zipcode, + } + ) return {'data': result} - @endpoint(description=_('Cities list'), - parameters={ - 'id': {'description': _('Get exactly one city using its code and postal code ' - 'separated with a dot'), - 'example_value': '75056.75014'}, - 'q': {'description': _("Search text in name or postal code"), - 'example_value': 'Paris'}, - 'code': {'description': _('INSEE code (or multiple codes separated with commas)'), - 'example_value': '75056'}, - 'region_code': {'description': _('Region code'), 'example_value': '11'}, - 'department_code': {'description': _('Department code'), 'example_value': '75'}, - }) - def cities(self, request, id=None, q=None, code=None, region_code=None, - department_code=None): + @endpoint( + description=_('Cities list'), + parameters={ + 'id': { + 'description': _( + 'Get exactly one city using its code and postal code ' 'separated with a dot' + ), + 'example_value': '75056.75014', + }, + 'q': {'description': _("Search text in name or postal code"), 'example_value': 'Paris'}, + 'code': { + 'description': _('INSEE code (or multiple codes separated with commas)'), + 'example_value': '75056', + }, + 'region_code': {'description': _('Region code'), 'example_value': '11'}, + 'department_code': {'description': _('Department code'), 'example_value': '75'}, + }, + ) + def cities(self, request, id=None, q=None, code=None, region_code=None, department_code=None): cities = 
CityModel.objects.all() if id is not None: @@ -267,8 +305,9 @@ class BaseAdresse(BaseResource): cities = cities.filter(code=code, zipcode=zipcode) if q: unaccented_q = simplify(q) - cities = cities.filter(Q(unaccent_name__istartswith=unaccented_q) | - Q(zipcode__istartswith=unaccented_q)) + cities = cities.filter( + Q(unaccent_name__istartswith=unaccented_q) | Q(zipcode__istartswith=unaccented_q) + ) if code: if ',' in code: codes = [c.strip() for c in code.split(',')] @@ -283,13 +322,14 @@ class BaseAdresse(BaseResource): cities = cities.select_related('department', 'region') return {'data': [city.to_json() for city in cities]} - @endpoint(description=_('Departments list'), - parameters={ - 'id': {'description': _('Get exactly one department using its code'), - 'example_value': '59'}, - 'q': {'description': _('Search text in name or code'), 'example_value': 'Nord'}, - 'region_code': {'description': _('Region code'), 'example_value': '32'}, - }) + @endpoint( + description=_('Departments list'), + parameters={ + 'id': {'description': _('Get exactly one department using its code'), 'example_value': '59'}, + 'q': {'description': _('Search text in name or code'), 'example_value': 'Nord'}, + 'region_code': {'description': _('Region code'), 'example_value': '32'}, + }, + ) def departments(self, request, id=None, q=None, region_code=None): departments = DepartmentModel.objects.all() @@ -297,21 +337,22 @@ class BaseAdresse(BaseResource): departments = departments.filter(code=id) if q: unaccented_q = simplify(q) - departments = departments.filter(Q(unaccent_name__istartswith=unaccented_q) | - Q(code__istartswith=unaccented_q)) + departments = departments.filter( + Q(unaccent_name__istartswith=unaccented_q) | Q(code__istartswith=unaccented_q) + ) if region_code: departments = departments.filter(region__code=region_code) departments = departments.select_related('region') return {'data': [department.to_json() for department in departments]} - @endpoint(description=_('Regions 
list'), - parameters={ - 'id': {'description': _('Get exactly one region using its code'), - 'example_value': '32'}, - 'q': {'description': _('Search text in name or code'), - 'example_value': 'Hauts-de-France'}, - }) + @endpoint( + description=_('Regions list'), + parameters={ + 'id': {'description': _('Get exactly one region using its code'), 'example_value': '32'}, + 'q': {'description': _('Search text in name or code'), 'example_value': 'Hauts-de-France'}, + }, + ) def regions(self, request, id=None, q=None): regions = RegionModel.objects.all() @@ -319,8 +360,9 @@ class BaseAdresse(BaseResource): regions = regions.filter(code=id) if q: unaccented_q = simplify(q) - regions = regions.filter(Q(unaccent_name__istartswith=unaccented_q) | - Q(code__istartswith=unaccented_q)) + regions = regions.filter( + Q(unaccent_name__istartswith=unaccented_q) | Q(code__istartswith=unaccented_q) + ) return {'data': [region.to_json() for region in regions]} @@ -362,7 +404,10 @@ class BaseAdresse(BaseResource): for department in departments: ban_gz = self.requests.get( - 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-{}.ndjson.gz'.format(department)) + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-{}.ndjson.gz'.format( + department + ) + ) if ban_gz.status_code != 200: continue @@ -386,7 +431,8 @@ class BaseAdresse(BaseResource): 'city': street_info['city'], 'zipcode': street_info['postcode'], 'type': street_info['type'], - }) + }, + ) if line is _not_found: raise Exception('bano file is empty') @@ -409,8 +455,7 @@ class BaseAdresse(BaseResource): except ValueError: error = 'invalid json, got: %s' % response.text if error: - self.logger.error('failed to update api geo data for endpoint %s: %s', - endpoint, error) + self.logger.error('failed to update api geo data for endpoint %s: %s', endpoint, error) return if not result: raise Exception('api geo returns empty json') @@ -449,8 +494,7 @@ class BaseAdresse(BaseResource): 
defaults['department'] = DepartmentModel.objects.get(code=data['codeDepartement']) if data.get('codeRegion'): defaults['region'] = RegionModel.objects.get(code=data['codeRegion']) - CityModel.objects.update_or_create( - code=data['code'], zipcode=zipcode, defaults=defaults) + CityModel.objects.update_or_create(code=data['code'], zipcode=zipcode, defaults=defaults) CityModel.objects.filter(last_update__lt=start_update).delete() def clean_addresses_cache(self): @@ -478,7 +522,6 @@ class BaseAdresse(BaseResource): class UnaccentNameMixin(object): - def save(self, *args, **kwargs): self.unaccent_name = simplify(self.name) super(UnaccentNameMixin, self).save(*args, **kwargs) diff --git a/passerelle/apps/bdp/admin.py b/passerelle/apps/bdp/admin.py index c215dff4..5336245e 100644 --- a/passerelle/apps/bdp/admin.py +++ b/passerelle/apps/bdp/admin.py @@ -6,4 +6,5 @@ from passerelle.apps.bdp.models import Bdp class BdpAdmin(admin.ModelAdmin): prepopulated_fields = {'slug': ('title',)} + admin.site.register(Bdp, BdpAdmin) diff --git a/passerelle/apps/bdp/migrations/0001_initial.py b/passerelle/apps/bdp/migrations/0001_initial.py index 5f8604d8..503e736a 100644 --- a/passerelle/apps/bdp/migrations/0001_initial.py +++ b/passerelle/apps/bdp/migrations/0001_initial.py @@ -14,16 +14,41 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Bdp', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('service_url', models.CharField(help_text='BDP Web Service URL', max_length=128, verbose_name='Service URL')), + ( + 'service_url', + models.CharField( + help_text='BDP Web Service URL', max_length=128, 
verbose_name='Service URL' + ), + ), ('username', models.CharField(max_length=128, verbose_name='Username', blank=True)), ('password', models.CharField(max_length=128, verbose_name='Password', blank=True)), - ('verify_cert', models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity')), - ('keystore', models.FileField(help_text='Certificate and private key in PEM format', upload_to=b'bdp', null=True, verbose_name='Keystore', blank=True)), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_bdp_users_+', related_query_name='+', blank=True)), + ( + 'verify_cert', + models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity'), + ), + ( + 'keystore', + models.FileField( + help_text='Certificate and private key in PEM format', + upload_to=b'bdp', + null=True, + verbose_name='Keystore', + blank=True, + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_bdp_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'BDP Web Service', diff --git a/passerelle/apps/bdp/migrations/0002_bdp_log_level.py b/passerelle/apps/bdp/migrations/0002_bdp_log_level.py index ff40b582..ade9793b 100644 --- a/passerelle/apps/bdp/migrations/0002_bdp_log_level.py +++ b/passerelle/apps/bdp/migrations/0002_bdp_log_level.py @@ -14,7 +14,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='bdp', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/bdp/migrations/0003_auto_20160316_0910.py b/passerelle/apps/bdp/migrations/0003_auto_20160316_0910.py index 96109bd0..0058f98e 100644 --- 
a/passerelle/apps/bdp/migrations/0003_auto_20160316_0910.py +++ b/passerelle/apps/bdp/migrations/0003_auto_20160316_0910.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='bdp', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/bdp/migrations/0004_auto_20160407_0456.py b/passerelle/apps/bdp/migrations/0004_auto_20160407_0456.py index 0f6d96ef..8a75b1a5 100644 --- a/passerelle/apps/bdp/migrations/0004_auto_20160407_0456.py +++ b/passerelle/apps/bdp/migrations/0004_auto_20160407_0456.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='bdp', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/bdp/models.py b/passerelle/apps/bdp/models.py index e724bb61..411a2d55 100644 --- a/passerelle/apps/bdp/models.py +++ b/passerelle/apps/bdp/models.py @@ -8,20 +8,21 @@ from django.utils.translation import ugettext_lazy as _ from passerelle.base.models import BaseResource + class Bdp(BaseResource): - service_url = models.CharField(max_length=128, blank=False, - verbose_name=_('Service URL'), - help_text=_('BDP Web Service URL')) - username = 
models.CharField(max_length=128, blank=True, - verbose_name=_('Username')) - password = models.CharField(max_length=128, blank=True, - verbose_name=_('Password')) - verify_cert = models.BooleanField(default=True, - verbose_name=_('Check HTTPS Certificate validity')) - keystore = models.FileField(upload_to='bdp', - blank=True, null=True, - verbose_name=_('Keystore'), - help_text=_('Certificate and private key in PEM format')) + service_url = models.CharField( + max_length=128, blank=False, verbose_name=_('Service URL'), help_text=_('BDP Web Service URL') + ) + username = models.CharField(max_length=128, blank=True, verbose_name=_('Username')) + password = models.CharField(max_length=128, blank=True, verbose_name=_('Password')) + verify_cert = models.BooleanField(default=True, verbose_name=_('Check HTTPS Certificate validity')) + keystore = models.FileField( + upload_to='bdp', + blank=True, + null=True, + verbose_name=_('Keystore'), + help_text=_('Certificate and private key in PEM format'), + ) category = _('Business Process Connectors') @@ -40,16 +41,13 @@ class Bdp(BaseResource): def get_api(self, endpoint, **params): options = self.requests_options() - return requests.get(self.service_url + '/api/' + endpoint, - params=params, **options).json() + return requests.get(self.service_url + '/api/' + endpoint, params=params, **options).json() def post_api(self, endpoint, obj): data = json.dumps(obj) headers = {'Content-Type': 'application/json'} options = self.requests_options() - request = requests.post( - self.service_url + '/api/' + endpoint, - data=data, headers=headers, **options) + request = requests.post(self.service_url + '/api/' + endpoint, data=data, headers=headers, **options) result = { 'status_code': request.status_code, 'x_request_id': request.headers.get('x-request-id'), diff --git a/passerelle/apps/bdp/urls.py b/passerelle/apps/bdp/urls.py index 0bd8cfe1..174db442 100644 --- a/passerelle/apps/bdp/urls.py +++ b/passerelle/apps/bdp/urls.py @@ -5,5 +5,9 @@ 
from .views import BdpDetailView, ResourcesView, PostAdherentView urlpatterns = [ url(r'^(?P[\w,-]+)/$', BdpDetailView.as_view(), name='bdp-view'), url(r'^(?P[\w,-]+)/(?P[\w,-]+)/$', ResourcesView.as_view(), name='bdp-resources'), - url(r'^(?P[\w,-]+)/post/adherent/$', csrf_exempt(PostAdherentView.as_view()), name='bdp-post-adherent'), + url( + r'^(?P[\w,-]+)/post/adherent/$', + csrf_exempt(PostAdherentView.as_view()), + name='bdp-post-adherent', + ), ] diff --git a/passerelle/apps/bdp/views.py b/passerelle/apps/bdp/views.py index 9fea8b8c..947e58cd 100644 --- a/passerelle/apps/bdp/views.py +++ b/passerelle/apps/bdp/views.py @@ -40,16 +40,20 @@ class PostAdherentView(View, SingleObjectMixin): @utils.protected_api('can_access') @utils.to_json() def post(self, request, *args, **kwargs): - data = json_loads(request.body) # JSON w.c.s. formdata + data = json_loads(request.body) # JSON w.c.s. formdata date_de_naissance = data['fields'].get('date_de_naissance') # force 1973-04-18T00:00:00Z date_de_naissance = date_de_naissance[:10] + 'T00:00:00Z' - abonnements = data['fields'].get('abonnements_raw') or \ - data['fields'].get('abonnements_raw') or \ - request.GET.get('abonnements') - bibliotheque_id = data['fields'].get('bibliotheque_raw') or \ - data['fields'].get('bibliotheque') or \ - request.GET.get('bibliotheque') + abonnements = ( + data['fields'].get('abonnements_raw') + or data['fields'].get('abonnements_raw') + or request.GET.get('abonnements') + ) + bibliotheque_id = ( + data['fields'].get('bibliotheque_raw') + or data['fields'].get('bibliotheque') + or request.GET.get('bibliotheque') + ) adherent = { 'nom': data['fields'].get('nom'), 'prenom': data['fields'].get('prenom'), diff --git a/passerelle/apps/cartads_cs/migrations/0001_initial.py b/passerelle/apps/cartads_cs/migrations/0001_initial.py index 29d6b2a5..41bc8675 100644 --- a/passerelle/apps/cartads_cs/migrations/0001_initial.py +++ b/passerelle/apps/cartads_cs/migrations/0001_initial.py @@ -18,11 +18,19 
@@ class Migration(migrations.Migration): migrations.CreateModel( name='CartaDSCS', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('wsdl_base_url', models.URLField(help_text='ex: https://example.net/adscs/webservices/', verbose_name='WSDL Base URL')), + ( + 'wsdl_base_url', + models.URLField( + help_text='ex: https://example.net/adscs/webservices/', verbose_name='WSDL Base URL' + ), + ), ('username', models.CharField(max_length=64, verbose_name='Username')), ('password', models.CharField(max_length=64, verbose_name='Password')), ('iv', models.CharField(max_length=16, verbose_name='Initialisation Vector')), @@ -31,7 +39,15 @@ class Migration(migrations.Migration): ('ftp_username', models.CharField(max_length=64, verbose_name='FTP Username')), ('ftp_password', models.CharField(max_length=64, verbose_name='FTP Password')), ('ftp_client_name', models.CharField(max_length=64, verbose_name='FTP Client Name')), - ('users', models.ManyToManyField(blank=True, related_name='_cartadscs_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_cartadscs_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Cart@DS CS', @@ -40,7 +56,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CartaDSDossier', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('email', models.CharField(max_length=256)), ('tracking_code', 
models.CharField(max_length=20)), ('commune_id', models.CharField(max_length=20)), @@ -59,10 +78,16 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CartaDSFile', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('tracking_code', models.CharField(max_length=20)), ('id_piece', models.CharField(max_length=20)), - ('uploaded_file', models.FileField(upload_to=passerelle.apps.cartads_cs.models.cartads_file_location)), + ( + 'uploaded_file', + models.FileField(upload_to=passerelle.apps.cartads_cs.models.cartads_file_location), + ), ('last_update_datetime', models.DateTimeField(auto_now=True)), ], ), diff --git a/passerelle/apps/cartads_cs/migrations/0002_cartadsdatacache.py b/passerelle/apps/cartads_cs/migrations/0002_cartadsdatacache.py index d1b1947d..2c83fb82 100644 --- a/passerelle/apps/cartads_cs/migrations/0002_cartadsdatacache.py +++ b/passerelle/apps/cartads_cs/migrations/0002_cartadsdatacache.py @@ -16,7 +16,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CartaDSDataCache', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('data_type', models.CharField(max_length=50)), ('data_parameters', django.contrib.postgres.fields.jsonb.JSONField(default={})), ('data_values', django.contrib.postgres.fields.jsonb.JSONField(default={})), diff --git a/passerelle/apps/cartads_cs/migrations/0006_cartadscs_client_name.py b/passerelle/apps/cartads_cs/migrations/0006_cartadscs_client_name.py index 4c2a73ed..58ec02f9 100644 --- a/passerelle/apps/cartads_cs/migrations/0006_cartadscs_client_name.py +++ b/passerelle/apps/cartads_cs/migrations/0006_cartadscs_client_name.py @@ -15,6 +15,12 @@ class 
Migration(migrations.Migration): migrations.AddField( model_name='cartadscs', name='client_name', - field=models.CharField(blank=True, help_text='Only useful in shared environments.', max_length=64, null=True, verbose_name='Client Name'), + field=models.CharField( + blank=True, + help_text='Only useful in shared environments.', + max_length=64, + null=True, + verbose_name='Client Name', + ), ), ] diff --git a/passerelle/apps/cartads_cs/migrations/0007_auto_20190923_1711.py b/passerelle/apps/cartads_cs/migrations/0007_auto_20190923_1711.py index 0e6f8d38..4f02ccea 100644 --- a/passerelle/apps/cartads_cs/migrations/0007_auto_20190923_1711.py +++ b/passerelle/apps/cartads_cs/migrations/0007_auto_20190923_1711.py @@ -15,7 +15,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CartaDSSubscriber', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name_id', models.CharField(max_length=32, null=True)), ], ), diff --git a/passerelle/apps/cartads_cs/models.py b/passerelle/apps/cartads_cs/models.py index d26002cb..5ac14102 100644 --- a/passerelle/apps/cartads_cs/models.py +++ b/passerelle/apps/cartads_cs/models.py @@ -106,12 +106,18 @@ class CartaDSDossier(models.Model): class AbstractCartaDSCS(BaseResource): - wsdl_base_url = models.URLField(_('WSDL Base URL'), - help_text=_('ex: https://example.net/adscs/webservices/')) + wsdl_base_url = models.URLField( + _('WSDL Base URL'), help_text=_('ex: https://example.net/adscs/webservices/') + ) username = models.CharField(_('Username'), max_length=64) password = models.CharField(_('Password'), max_length=64) - client_name = models.CharField(_('Client Name'), max_length=64, blank=True, null=True, - help_text=_('Only useful in shared environments.')) + client_name = models.CharField( + _('Client Name'), + max_length=64, + blank=True, + 
null=True, + help_text=_('Only useful in shared environments.'), + ) iv = models.CharField(_('Initialisation Vector'), max_length=16) secret_key = models.CharField(_('Secret Key'), max_length=16) ftp_server = models.CharField(_('FTP Server'), max_length=128) @@ -143,7 +149,8 @@ class AbstractCartaDSCS(BaseResource): parsed_wsdl_address = urlparse.urlparse(client.service._binding_options['address']) parsed_real_address = urlparse.urlparse(self.wsdl_base_url) client.service._binding_options['address'] = urlparse.urlunparse( - parsed_real_address[:2] + parsed_wsdl_address[2:]) + parsed_real_address[:2] + parsed_wsdl_address[2:] + ) return client def get_token(self): @@ -157,17 +164,14 @@ class AbstractCartaDSCS(BaseResource): token_data_str = json.dumps(token_data) data_pad = AES.block_size - len(token_data_str) % AES.block_size aes = AES.new(self.secret_key, AES.MODE_CBC, self.iv) - token = aes.encrypt(token_data_str + (chr(data_pad)*data_pad)) + token = aes.encrypt(token_data_str + (chr(data_pad) * data_pad)) return force_text(base64.encodebytes(token)).replace('\n', '').rstrip('=') def check_status(self): self.soap_client().service.GetCommunes(self.get_token(), {}) # description of common endpoint parameters - COMMUNE_ID_PARAM = { - 'description': _('Identifier of collectivity'), - 'example_value': '2' - } + COMMUNE_ID_PARAM = {'description': _('Identifier of collectivity'), 'example_value': '2'} TYPE_DOSSIER_ID_PARAM = { 'description': _('Identifier of file type'), 'example_value': 'CU', @@ -208,8 +212,8 @@ class AbstractCartaDSCS(BaseResource): if resp is None: continue data_cache, created = CartaDSDataCache.objects.get_or_create( - data_type='types_dossier', - data_parameters={'commune_id': int(commune['id'])}) + data_type='types_dossier', data_parameters={'commune_id': int(commune['id'])} + ) data_cache.data_values = {'data': [{'id': str(x['Key']), 'text': x['Value']} for x in resp]} types_dossier_ids.update({x['id']: True for x in 
data_cache.data_values['data']}) data_cache.save() @@ -222,12 +226,13 @@ class AbstractCartaDSCS(BaseResource): if resp is None: continue data_cache, created = CartaDSDataCache.objects.get_or_create( - data_type='objets_demande', - data_parameters={'type_dossier_id': type_dossier_id}) + data_type='objets_demande', data_parameters={'type_dossier_id': type_dossier_id} + ) data_cache.data_values = {'data': [{'id': str(x['Key']), 'text': x['Value']} for x in resp]} objets_demande_ids.update({x['id']: True for x in data_cache.data_values['data']}) types_dossiers_objets_demandes_tuples.extend( - [(type_dossier_id, x['id']) for x in data_cache.data_values['data']]) + [(type_dossier_id, x['id']) for x in data_cache.data_values['data']] + ) data_cache.save() # liste_pdf @@ -236,8 +241,9 @@ class AbstractCartaDSCS(BaseResource): os.makedirs(pdfs_path) for type_compte in [1]: for type_dossier_id in types_dossier_ids.keys(): - resp = client.service.GetListePdf(self.get_token(), type_dossier_id, - {'TypeCompteUtilisateur': type_compte}) + resp = client.service.GetListePdf( + self.get_token(), type_dossier_id, {'TypeCompteUtilisateur': type_compte} + ) if resp is None: continue @@ -250,16 +256,22 @@ class AbstractCartaDSCS(BaseResource): return u'%(Nom)s' % x data_cache, created = CartaDSDataCache.objects.get_or_create( - data_type='liste_pdf', - data_parameters={ - 'type_dossier_id': type_dossier_id, - 'type_compte': type_compte, - }) - data_cache.data_values = {'data': [ - {'id': x['Identifiant'], - 'text': format_cerfa_label(x), - 'url': x['UrlTelechargement'], - } for x in resp or []]} + data_type='liste_pdf', + data_parameters={ + 'type_dossier_id': type_dossier_id, + 'type_compte': type_compte, + }, + ) + data_cache.data_values = { + 'data': [ + { + 'id': x['Identifiant'], + 'text': format_cerfa_label(x), + 'url': x['UrlTelechargement'], + } + for x in resp or [] + ] + } for value in data_cache.data_values['data']: filepath = 
os.path.join(default_storage.path(self.pdf_path(value))) @@ -276,21 +288,27 @@ class AbstractCartaDSCS(BaseResource): if resp is None: continue data_cache, created = CartaDSDataCache.objects.get_or_create( - data_type='pieces', - data_parameters={ - 'type_dossier_id': type_dossier_id, - 'objet_demande_id': str(objet_demande_id), - }) + data_type='pieces', + data_parameters={ + 'type_dossier_id': type_dossier_id, + 'objet_demande_id': str(objet_demande_id), + }, + ) if resp is not None: - data_cache.data_values = {'data': [ - {'id': str(x['IdPiece']), - 'text': x['Libelle'], - 'description': x['Descriptif'], - 'codePiece': x['CodePiece'], - 'reglementaire': x['Reglementaire'], - 'files': [], - 'max_files': 6, - } for x in resp]} + data_cache.data_values = { + 'data': [ + { + 'id': str(x['IdPiece']), + 'text': x['Libelle'], + 'description': x['Descriptif'], + 'codePiece': x['CodePiece'], + 'reglementaire': x['Reglementaire'], + 'files': [], + 'max_files': 6, + } + for x in resp + ] + } data_cache.save() def get_dossier_steps(self, client, token, dossier): @@ -319,15 +337,19 @@ class AbstractCartaDSCS(BaseResource): self.logger.exception('error getting etapes of dossier (%s) (%s)', dossier.id, e) try: dossier.cartads_cache_code_acces = client_suivi.service.GetMotPasse( - self.get_token(), dossier.cartads_id_dossier) + self.get_token(), dossier.cartads_id_dossier + ) except zeep.exceptions.Fault as e: self.logger.exception('error getting access code (%s) (%s)', dossier.id, e) try: - infos_dossier = client_dossier.service.GetInfosDossier(self.get_token(), dossier.cartads_id_dossier) + infos_dossier = client_dossier.service.GetInfosDossier( + self.get_token(), dossier.cartads_id_dossier + ) if infos_dossier: # load(dump(...)) to serialize dates - dossier.cartads_cache_infos = json.loads(json.dumps( - zeep_helpers.serialize_object(infos_dossier), cls=JSONEncoder)) + dossier.cartads_cache_infos = json.loads( + json.dumps(zeep_helpers.serialize_object(infos_dossier), 
cls=JSONEncoder) + ) except zeep.exceptions.Fault as e: self.logger.exception('error getting dossier infos (%s) (%s)', dossier.id, e) dossier.save() @@ -364,49 +386,54 @@ class AbstractCartaDSCS(BaseResource): return info['text'] return None - @endpoint(description=_('Get list of file types'), - parameters={ - 'commune_id': COMMUNE_ID_PARAM, - 'filter': { - 'description': _('List of types to include (separated by commas)'), - 'example_value': 'CU,OP', - }, - }) + @endpoint( + description=_('Get list of file types'), + parameters={ + 'commune_id': COMMUNE_ID_PARAM, + 'filter': { + 'description': _('List of types to include (separated by commas)'), + 'example_value': 'CU,OP', + }, + }, + ) def types_dossier(self, request, commune_id, filter=None): cache = CartaDSDataCache.objects.get( - data_type='types_dossier', - data_parameters={'commune_id': int(commune_id)}) + data_type='types_dossier', data_parameters={'commune_id': int(commune_id)} + ) response = cache.data_values if filter: filter_list = filter.split(',') response['data'] = [x for x in response['data'] if x['id'] in filter_list] return response - @endpoint(description=_('Get list of demand subjects'), - parameters={'type_dossier_id': TYPE_DOSSIER_ID_PARAM}, - ) + @endpoint( + description=_('Get list of demand subjects'), + parameters={'type_dossier_id': TYPE_DOSSIER_ID_PARAM}, + ) def objets_demande(self, request, type_dossier_id): cache = CartaDSDataCache.objects.get( - data_type='objets_demande', - data_parameters={'type_dossier_id': type_dossier_id}) + data_type='objets_demande', data_parameters={'type_dossier_id': type_dossier_id} + ) return cache.data_values - @endpoint(description=_('Get list of CERFA documents'), - parameters={ - 'type_dossier_id': TYPE_DOSSIER_ID_PARAM, - 'type_compte': {'description': _('Type of account')}, - }) + @endpoint( + description=_('Get list of CERFA documents'), + parameters={ + 'type_dossier_id': TYPE_DOSSIER_ID_PARAM, + 'type_compte': {'description': _('Type of 
account')}, + }, + ) def liste_pdf(self, request, type_dossier_id, type_compte=1): cache = CartaDSDataCache.objects.get( - data_type='liste_pdf', - data_parameters={ - 'type_dossier_id': type_dossier_id, - 'type_compte': type_compte, - }) + data_type='liste_pdf', + data_parameters={ + 'type_dossier_id': type_dossier_id, + 'type_compte': type_compte, + }, + ) if request: # point to local documents cache for pdf in cache.data_values['data']: - pdf['url'] = request.build_absolute_uri( - os.path.join(settings.MEDIA_URL, self.pdf_path(pdf))) + pdf['url'] = request.build_absolute_uri(os.path.join(settings.MEDIA_URL, self.pdf_path(pdf))) return cache.data_values def pdf_path(self, pdf): @@ -416,50 +443,55 @@ class AbstractCartaDSCS(BaseResource): filename = '%s.pdf' % pdf['id'] return os.path.join('public/cartads_cs', self.slug, 'documents', filename) - - @endpoint(perm='can_access', - description=_('Get list of file items'), - parameters={ - 'type_dossier_id': TYPE_DOSSIER_ID_PARAM, - 'objet_demande_id': OBJET_DEMANDE_ID_PARAM, - 'tracking_code': TRACKING_CODE_PARAM, - 'demolitions': { - 'description': _('Include items for demolition work'), - 'example_value': 'false', - 'type': 'bool', - }, - }) + @endpoint( + perm='can_access', + description=_('Get list of file items'), + parameters={ + 'type_dossier_id': TYPE_DOSSIER_ID_PARAM, + 'objet_demande_id': OBJET_DEMANDE_ID_PARAM, + 'tracking_code': TRACKING_CODE_PARAM, + 'demolitions': { + 'description': _('Include items for demolition work'), + 'example_value': 'false', + 'type': 'bool', + }, + }, + ) def pieces(self, request, type_dossier_id, objet_demande_id, tracking_code, demolitions=True): cache, created = CartaDSDataCache.objects.get_or_create( - data_type='pieces', - data_parameters={ - 'type_dossier_id': type_dossier_id, - 'objet_demande_id': objet_demande_id, - }) + data_type='pieces', + data_parameters={ + 'type_dossier_id': type_dossier_id, + 'objet_demande_id': objet_demande_id, + }, + ) signer = 
Signer(salt='cart@ds_cs') upload_token = signer.sign(tracking_code) cerfa_pieces = [ - {'id': 'cerfa-%s-%s' % (type_dossier_id, objet_demande_id), - 'text': 'Cerfa rempli', - 'description': '', - 'codePiece': '', - 'reglementaire': True, - 'files': [], - 'max_files': 1, - 'section_start': 'Cerfa', + { + 'id': 'cerfa-%s-%s' % (type_dossier_id, objet_demande_id), + 'text': 'Cerfa rempli', + 'description': '', + 'codePiece': '', + 'reglementaire': True, + 'files': [], + 'max_files': 1, + 'section_start': 'Cerfa', + }, + { + 'id': 'cerfa-autres-%s-%s' % (type_dossier_id, objet_demande_id), + 'text': 'Cerfa demandeurs complémentaires', + 'description': '', + 'codePiece': '', + 'reglementaire': False, + 'files': [], + 'max_files': 6, }, - {'id': 'cerfa-autres-%s-%s' % (type_dossier_id, objet_demande_id), - 'text': 'Cerfa demandeurs complémentaires', - 'description': '', - 'codePiece': '', - 'reglementaire': False, - 'files': [], - 'max_files': 6, - } ] pieces = cache.data_values['data'] if cache.data_values else [] if demolitions is False: + def is_demolition_piece(piece): if piece['reglementaire']: return False @@ -467,6 +499,7 @@ class AbstractCartaDSCS(BaseResource): if re.match(r'^%s\d' % demolition_prefix, piece['codePiece']): return True return False + pieces = [x for x in pieces if not is_demolition_piece(x)] required_pieces = [x for x in pieces if x['reglementaire']] @@ -480,10 +513,9 @@ class AbstractCartaDSCS(BaseResource): for piece in pieces: if request: - upload_url = request.build_absolute_uri('%supload/%s/%s/' % ( - self.get_absolute_url(), - piece['id'], - upload_token)) + upload_url = request.build_absolute_uri( + '%supload/%s/%s/' % (self.get_absolute_url(), piece['id'], upload_token) + ) else: upload_url = None piece['files'] = [ @@ -492,18 +524,23 @@ class AbstractCartaDSCS(BaseResource): 'name': os.path.basename(x.uploaded_file.name), 'token': signer.sign(str(x.id)), 'id': x.id, - } for x in known_files if x.id_piece == str(piece['id'])] + } + for x 
in known_files + if x.id_piece == str(piece['id']) + ] if len(piece['files']) < piece['max_files']: piece['files'].append({'url': upload_url}) return {'data': pieces} - @endpoint(perm='can_access', - description=_('Check list of file items'), - parameters={ - 'type_dossier_id': TYPE_DOSSIER_ID_PARAM, - 'objet_demande_id': OBJET_DEMANDE_ID_PARAM, - 'tracking_code': TRACKING_CODE_PARAM, - }) + @endpoint( + perm='can_access', + description=_('Check list of file items'), + parameters={ + 'type_dossier_id': TYPE_DOSSIER_ID_PARAM, + 'objet_demande_id': OBJET_DEMANDE_ID_PARAM, + 'tracking_code': TRACKING_CODE_PARAM, + }, + ) def check_pieces(self, request, type_dossier_id, objet_demande_id, tracking_code): pieces = self.pieces(request, type_dossier_id, objet_demande_id, tracking_code) result = True @@ -515,11 +552,13 @@ class AbstractCartaDSCS(BaseResource): break return {'result': result} - @endpoint(perm='can_access', - description=_('Get list of additional file items'), - parameters={ - 'tracking_code': TRACKING_CODE_PARAM, - }) + @endpoint( + perm='can_access', + description=_('Get list of additional file items'), + parameters={ + 'tracking_code': TRACKING_CODE_PARAM, + }, + ) def additional_pieces(self, request, tracking_code): client = self.soap_client(wsdl_url=self.get_wsdl_url('ServicePiece')) dossier = CartaDSDossier.objects.get(tracking_code=tracking_code) @@ -531,31 +570,35 @@ class AbstractCartaDSCS(BaseResource): upload_token = signer.sign(tracking_code) pieces = [ - {'id': 'comp-%s-%s' % (x['IdDosPiece'], x['IdPiece']), - 'text': x['LibellePiece'], - 'description': x['Descriptif'], - 'codePiece': x['CodePiece'], - 'files': [], - 'max_files': 6, - } for x in resp] + { + 'id': 'comp-%s-%s' % (x['IdDosPiece'], x['IdPiece']), + 'text': x['LibellePiece'], + 'description': x['Descriptif'], + 'codePiece': x['CodePiece'], + 'files': [], + 'max_files': 6, + } + for x in resp + ] for piece in pieces: if request: - upload_url = 
request.build_absolute_uri('%supload/%s/%s/' % ( - self.get_absolute_url(), - piece['id'], - upload_token)) + upload_url = request.build_absolute_uri( + '%supload/%s/%s/' % (self.get_absolute_url(), piece['id'], upload_token) + ) else: upload_url = None piece['files'] = [] piece['files'].append({'url': upload_url}) return {'data': pieces} - @endpoint(perm='can_access', - description=_('Get list of DOC file items'), - parameters={ - 'tracking_code': TRACKING_CODE_PARAM, - }) + @endpoint( + perm='can_access', + description=_('Get list of DOC file items'), + parameters={ + 'tracking_code': TRACKING_CODE_PARAM, + }, + ) def doc_pieces(self, request, tracking_code): dossier = CartaDSDossier.objects.get(tracking_code=tracking_code) status = self.status(request, dossier.id) @@ -565,31 +608,33 @@ class AbstractCartaDSCS(BaseResource): signer = Signer(salt='cart@ds_cs') upload_token = signer.sign(tracking_code) pieces = [ - {'id': 'cerfa-doc', - 'text': 'CERFA', - 'description': '', - 'codePiece': '', - 'reglementaire': True, - 'files': [], - 'max_files': 1, + { + 'id': 'cerfa-doc', + 'text': 'CERFA', + 'description': '', + 'codePiece': '', + 'reglementaire': True, + 'files': [], + 'max_files': 1, }, ] for piece in pieces: if request: - upload_url = request.build_absolute_uri('%supload/%s/%s/' % ( - self.get_absolute_url(), - piece['id'], - upload_token)) + upload_url = request.build_absolute_uri( + '%supload/%s/%s/' % (self.get_absolute_url(), piece['id'], upload_token) + ) else: upload_url = None piece['files'].append({'url': upload_url}) return {'data': pieces} - @endpoint(perm='can_access', - description=_('Get list of DAACT file items'), - parameters={ - 'tracking_code': TRACKING_CODE_PARAM, - }) + @endpoint( + perm='can_access', + description=_('Get list of DAACT file items'), + parameters={ + 'tracking_code': TRACKING_CODE_PARAM, + }, + ) def daact_pieces(self, request, tracking_code): dossier = CartaDSDossier.objects.get(tracking_code=tracking_code) status = 
self.status(request, dossier.id) @@ -603,44 +648,51 @@ class AbstractCartaDSCS(BaseResource): upload_token = signer.sign(tracking_code) pieces = [ - {'id': 'cerfa-daact', - 'text': 'CERFA', - 'description': '', - 'codePiece': '', - 'reglementaire': True, - 'files': [], - 'max_files': 1, + { + 'id': 'cerfa-daact', + 'text': 'CERFA', + 'description': '', + 'codePiece': '', + 'reglementaire': True, + 'files': [], + 'max_files': 1, }, ] - pieces.extend([ - {'id': 'daact-%s' % x['IdPiece'], - 'text': x['LibellePiece'], - 'description': x['Descriptif'], - 'codePiece': x['CodePiece'], - 'files': [], - 'max_files': 6, - } for x in resp]) + pieces.extend( + [ + { + 'id': 'daact-%s' % x['IdPiece'], + 'text': x['LibellePiece'], + 'description': x['Descriptif'], + 'codePiece': x['CodePiece'], + 'files': [], + 'max_files': 6, + } + for x in resp + ] + ) for piece in pieces: if request: - upload_url = request.build_absolute_uri('%supload/%s/%s/' % ( - self.get_absolute_url(), - piece['id'], - upload_token)) + upload_url = request.build_absolute_uri( + '%supload/%s/%s/' % (self.get_absolute_url(), piece['id'], upload_token) + ) else: upload_url = None piece['files'] = [] piece['files'].append({'url': upload_url}) return {'data': pieces} - @endpoint(methods=['post'], - pattern='^(?P[\w-]+)/(?P[\w:_-]+)/$', - description=_('Upload a single document file'), - parameters={ - 'id_piece': PIECE_ID_PARAM, - 'token': UPLOAD_TOKEN_PARAM, - }) + @endpoint( + methods=['post'], + pattern='^(?P[\w-]+)/(?P[\w:_-]+)/$', + description=_('Upload a single document file'), + parameters={ + 'id_piece': PIECE_ID_PARAM, + 'token': UPLOAD_TOKEN_PARAM, + }, + ) def upload(self, request, id_piece, token, **kwargs): if not request.FILES.get('files[]'): # silently ignore request without files @@ -660,24 +712,29 @@ class AbstractCartaDSCS(BaseResource): if request.FILES['files[]'].size > 25 * 1024 * 1024: return [{'error': force_text(_('The file should not exceed 25MB.'))}] file_upload = CartaDSFile( - 
tracking_code=tracking_code, - id_piece=id_piece, - uploaded_file=request.FILES['files[]']) + tracking_code=tracking_code, id_piece=id_piece, uploaded_file=request.FILES['files[]'] + ) file_upload.save() - return [{'name': os.path.basename(file_upload.uploaded_file.name), - 'token': signer.sign(str(file_upload.id))}] + return [ + { + 'name': os.path.basename(file_upload.uploaded_file.name), + 'token': signer.sign(str(file_upload.id)), + } + ] - @endpoint(methods=['post'], - name='upload', - pattern='^(?P[\w-]+)/(?P[\w:_-]+)/(?P[\w:_-]+)/delete/$', - description=_('Delete a single document file'), - parameters={ - 'id_piece': PIECE_ID_PARAM, - 'token': UPLOAD_TOKEN_PARAM, - 'file_upload': { - 'description': _('Signed identifier of single document upload'), - }, - }) + @endpoint( + methods=['post'], + name='upload', + pattern='^(?P[\w-]+)/(?P[\w:_-]+)/(?P[\w:_-]+)/delete/$', + description=_('Delete a single document file'), + parameters={ + 'id_piece': PIECE_ID_PARAM, + 'token': UPLOAD_TOKEN_PARAM, + 'file_upload': { + 'description': _('Signed identifier of single document upload'), + }, + }, + ) def upload_delete(self, request, id_piece, token, file_upload, **kwargs): # this cannot be verb DELETE as we have no way to set # Access-Control-Allow-Methods @@ -686,41 +743,53 @@ class AbstractCartaDSCS(BaseResource): CartaDSFile.objects.filter(id=signer.unsign(file_upload)).delete() return {'err': 0} - @endpoint(perm='can_access', - description=_('Validate and send a file'), - parameters={ - 'commune_id': COMMUNE_ID_PARAM, - 'type_dossier_id': TYPE_DOSSIER_ID_PARAM, - 'objet_demande_id': OBJET_DEMANDE_ID_PARAM, - 'tracking_code': TRACKING_CODE_PARAM, - 'email': { - 'description': _('Email of requester'), - }, - 'name_id': { - 'description': _('UUID of requester'), - }, - 'formdata_url': { - 'description': _('URL of user form'), - }, - }) - def send(self, request, commune_id, type_dossier_id, objet_demande_id, - tracking_code, email, name_id=None, formdata_url=None): + 
@endpoint( + perm='can_access', + description=_('Validate and send a file'), + parameters={ + 'commune_id': COMMUNE_ID_PARAM, + 'type_dossier_id': TYPE_DOSSIER_ID_PARAM, + 'objet_demande_id': OBJET_DEMANDE_ID_PARAM, + 'tracking_code': TRACKING_CODE_PARAM, + 'email': { + 'description': _('Email of requester'), + }, + 'name_id': { + 'description': _('UUID of requester'), + }, + 'formdata_url': { + 'description': _('URL of user form'), + }, + }, + ) + def send( + self, + request, + commune_id, + type_dossier_id, + objet_demande_id, + tracking_code, + email, + name_id=None, + formdata_url=None, + ): dossier = CartaDSDossier( - commune_id=commune_id, - type_dossier_id=type_dossier_id, - objet_demande_id=objet_demande_id, - tracking_code=tracking_code, - email=email, - formdata_url=formdata_url - ) + commune_id=commune_id, + type_dossier_id=type_dossier_id, + objet_demande_id=objet_demande_id, + tracking_code=tracking_code, + email=email, + formdata_url=formdata_url, + ) dossier.save() signer = Signer(salt='cart@ds_cs/dossier') - notification_base_url = reverse('generic-endpoint', kwargs={ - 'connector': self.get_connector_slug(), - 'slug': self.slug, - 'endpoint': 'notification'}) + notification_base_url = reverse( + 'generic-endpoint', + kwargs={'connector': self.get_connector_slug(), 'slug': self.slug, 'endpoint': 'notification'}, + ) dossier.notification_url = request.build_absolute_uri( - notification_base_url + '/%s/' % signer.sign(str(dossier.id))) + notification_base_url + '/%s/' % signer.sign(str(dossier.id)) + ) dossier.save() if name_id: dossier.subscribers.add(CartaDSSubscriber.objects.get_or_create(name_id=name_id)[0]) @@ -747,21 +816,23 @@ class AbstractCartaDSCS(BaseResource): continue cartads_file = CartaDSFile.objects.get(id=file['id']) if piece['id'] == 'cerfa-%s-%s' % (dossier.type_dossier_id, dossier.objet_demande_id): - zip_file.write( - cartads_file.uploaded_file.path, - '%s.pdf' % cerfa_id) + zip_file.write(cartads_file.uploaded_file.path, '%s.pdf' 
% cerfa_id) elif piece['id'].startswith('cerfa-autres-'): zip_file.write( - cartads_file.uploaded_file.path, - 'Fiches_complementaires/Cerfa_autres_demandeurs_%d.pdf' % cnt) + cartads_file.uploaded_file.path, + 'Fiches_complementaires/Cerfa_autres_demandeurs_%d.pdf' % cnt, + ) else: zip_file.write( - cartads_file.uploaded_file.path, - 'Pieces/%s-%s%s%s' % ( - piece['id'], - piece['codePiece'], - cnt, - os.path.splitext(cartads_file.uploaded_file.path)[-1])) + cartads_file.uploaded_file.path, + 'Pieces/%s-%s%s%s' + % ( + piece['id'], + piece['codePiece'], + cnt, + os.path.splitext(cartads_file.uploaded_file.path)[-1], + ), + ) cnt += 1 zip_file.close() dossier.zip_ready = True @@ -772,9 +843,7 @@ class AbstractCartaDSCS(BaseResource): ftp = FTP(self.ftp_server) ftp.login(self.ftp_username, self.ftp_password) ftp.cwd(self.ftp_client_name) - ftp.storbinary( - 'STOR %s' % os.path.basename(zip_filename), - open(zip_filename, 'rb')) + ftp.storbinary('STOR %s' % os.path.basename(zip_filename), open(zip_filename, 'rb')) ftp.quit() def send_to_cartads(self, dossier_id): @@ -784,33 +853,35 @@ class AbstractCartaDSCS(BaseResource): client = self.soap_client() resp = client.service.NotifierDepotDossier( - self.get_token(), - dossier.commune_id, - dossier.type_dossier_id, - os.path.basename(zip_filename), - dossier.email, - key_value_of_stringstring( - { - 'NotificationMailDemandeur': '0', - 'IdDossierExterne': 'publik-%s-%s' % (dossier.id, dossier.tracking_code), - 'NumeroDossierExterne': 'publik-%s-%s' % (dossier.id, dossier.tracking_code), - 'TraitementImmediat': '0', - 'UrlNotification': dossier.notification_url, - })) + self.get_token(), + dossier.commune_id, + dossier.type_dossier_id, + os.path.basename(zip_filename), + dossier.email, + key_value_of_stringstring( + { + 'NotificationMailDemandeur': '0', + 'IdDossierExterne': 'publik-%s-%s' % (dossier.id, dossier.tracking_code), + 'NumeroDossierExterne': 'publik-%s-%s' % (dossier.id, dossier.tracking_code), + 
'TraitementImmediat': '0', + 'UrlNotification': dossier.notification_url, + } + ), + ) dossier.zip_sent = True dossier.zip_ack_response = str(resp) dossier.save() CartaDSFile.objects.filter(tracking_code=dossier.tracking_code).update(sent_to_cartads=now()) self.sync_subscribers_role(dossier) - @endpoint(pattern='^(?P[\w:_-]+)/$', - methods=['post'], - description=_('Notification of file processing by Cart@DS CS'), - parameters={ - 'signed_dossier_id': { - 'description': _('Signed identifier of file') - }, - }) + @endpoint( + pattern='^(?P[\w:_-]+)/$', + methods=['post'], + description=_('Notification of file processing by Cart@DS CS'), + parameters={ + 'signed_dossier_id': {'description': _('Signed identifier of file')}, + }, + ) def notification(self, request, signed_dossier_id): signer = Signer(salt='cart@ds_cs/dossier') dossier_id = signer.unsign(signed_dossier_id) @@ -818,17 +889,23 @@ class AbstractCartaDSCS(BaseResource): dossier.notification_message = request.POST['notification'] notification = etree.fromstring(dossier.notification_message.encode('utf-8')) if notification.find('Succes').text == 'true': - dossier.cartads_id_dossier = notification.find('InformationsComplementaires/IdDossierCartads').text - dossier.cartads_numero_dossier = notification.find('InformationsComplementaires/NumeroDossier').text + dossier.cartads_id_dossier = notification.find( + 'InformationsComplementaires/IdDossierCartads' + ).text + dossier.cartads_numero_dossier = notification.find( + 'InformationsComplementaires/NumeroDossier' + ).text self.sync_subscribers_role(dossier) dossier.save() return HttpResponse('ok', content_type='text/plain') - @endpoint(perm='can_access', - description=_('Send requested additional file items'), - parameters={ - 'tracking_code': TRACKING_CODE_PARAM, - }) + @endpoint( + perm='can_access', + description=_('Send requested additional file items'), + parameters={ + 'tracking_code': TRACKING_CODE_PARAM, + }, + ) def send_additional_pieces(self, request, 
tracking_code): dossier = CartaDSDossier.objects.get(tracking_code=tracking_code) self.add_job('send_additional_pieces_to_cartads', dossier_id=dossier.id) @@ -839,37 +916,43 @@ class AbstractCartaDSCS(BaseResource): client = self.soap_client(wsdl_url=self.get_wsdl_url('ServicePiece')) resp = client.service.GetPiecesDossierACompleter(self.get_token(), dossier.cartads_id_dossier) pieces = [ - {'id': 'comp-%s-%s' % (x['IdDosPiece'], x['IdPiece']), - 'idPiece': x['IdPiece'], - 'codePiece': x['CodePiece'], - } for x in resp] + { + 'id': 'comp-%s-%s' % (x['IdDosPiece'], x['IdPiece']), + 'idPiece': x['IdPiece'], + 'codePiece': x['CodePiece'], + } + for x in resp + ] client = self.soap_client(wsdl_url=self.get_wsdl_url('ServiceDocumentation')) for piece_type in pieces: for i, piece in enumerate( - CartaDSFile.objects.filter( - tracking_code=dossier.tracking_code, - id_piece=piece_type['id'])): + CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, id_piece=piece_type['id']) + ): if piece.sent_to_cartads: continue id_dos_piece = piece.id_piece.split('-')[1] filename = '%s-%s%s%s' % ( piece_type['idPiece'], piece_type['codePiece'], - '%03d' % (i+1), - os.path.splitext(piece.uploaded_file.name)[-1]) + '%03d' % (i + 1), + os.path.splitext(piece.uploaded_file.name)[-1], + ) content = piece.uploaded_file.read() try: - resp = client.service.UploadFile(FileByteStream=content, - _soapheaders={ - 'IdDossier': dossier.cartads_id_dossier, - 'NomFichier': filename, - 'Length': piece.uploaded_file.size, - 'Token': self.get_token(), - 'InformationsComplementaires': key_value_of_stringstring( - {'idDosPiece': id_dos_piece}), - }) + resp = client.service.UploadFile( + FileByteStream=content, + _soapheaders={ + 'IdDossier': dossier.cartads_id_dossier, + 'NomFichier': filename, + 'Length': piece.uploaded_file.size, + 'Token': self.get_token(), + 'InformationsComplementaires': key_value_of_stringstring( + {'idDosPiece': id_dos_piece} + ), + }, + ) except zeep.exceptions.Fault as e: 
self.logger.exception('error pushing file item %d (%s)', piece.id, e) continue @@ -878,11 +961,13 @@ class AbstractCartaDSCS(BaseResource): piece.sent_to_cartads = now() piece.save() - @endpoint(perm='can_access', - description=_('Send DOC file items'), - parameters={ - 'tracking_code': TRACKING_CODE_PARAM, - }) + @endpoint( + perm='can_access', + description=_('Send DOC file items'), + parameters={ + 'tracking_code': TRACKING_CODE_PARAM, + }, + ) def send_doc_pieces(self, request, tracking_code): dossier = CartaDSDossier.objects.get(tracking_code=tracking_code) self.add_job('send_doc_pieces_to_cartads', dossier_id=dossier.id) @@ -892,26 +977,25 @@ class AbstractCartaDSCS(BaseResource): dossier = CartaDSDossier.objects.get(id=dossier_id) client = self.soap_client(wsdl_url=self.get_wsdl_url('ServiceDocumentation')) pieces = CartaDSFile.objects.filter( - tracking_code=dossier.tracking_code, - id_piece='cerfa-doc', - sent_to_cartads__isnull=True) + tracking_code=dossier.tracking_code, id_piece='cerfa-doc', sent_to_cartads__isnull=True + ) assert pieces.count() == 1 piece = pieces[0] content = piece.uploaded_file.read() try: - resp = client.service.UploadFile(FileByteStream=content, - _soapheaders={ - 'IdDossier': dossier.cartads_id_dossier, - 'NomFichier': 'cerfa-doc.pdf', - 'Length': piece.uploaded_file.size, - 'Token': self.get_token(), - 'InformationsComplementaires': key_value_of_stringstring( - {'docDaact': 'doc', - 'renameFile': 'true' - }, - ), - }) + resp = client.service.UploadFile( + FileByteStream=content, + _soapheaders={ + 'IdDossier': dossier.cartads_id_dossier, + 'NomFichier': 'cerfa-doc.pdf', + 'Length': piece.uploaded_file.size, + 'Token': self.get_token(), + 'InformationsComplementaires': key_value_of_stringstring( + {'docDaact': 'doc', 'renameFile': 'true'}, + ), + }, + ) except zeep.exceptions.Fault as e: self.logger.exception('error pushing file item %d (%s)', piece.id, e) else: @@ -919,11 +1003,13 @@ class AbstractCartaDSCS(BaseResource): 
piece.sent_to_cartads = now() piece.save() - @endpoint(perm='can_access', - description=_('Send DAACT file items'), - parameters={ - 'tracking_code': TRACKING_CODE_PARAM, - }) + @endpoint( + perm='can_access', + description=_('Send DAACT file items'), + parameters={ + 'tracking_code': TRACKING_CODE_PARAM, + }, + ) def send_daact_pieces(self, request, tracking_code): dossier = CartaDSDossier.objects.get(tracking_code=tracking_code) self.add_job('send_daact_pieces_to_cartads', dossier_id=dossier.id) @@ -936,9 +1022,8 @@ class AbstractCartaDSCS(BaseResource): client = self.soap_client(wsdl_url=self.get_wsdl_url('ServiceDocumentation')) for piece_type in pieces: for piece in CartaDSFile.objects.filter( - tracking_code=dossier.tracking_code, - sent_to_cartads__isnull=True, - id_piece=piece_type['id']): + tracking_code=dossier.tracking_code, sent_to_cartads__isnull=True, id_piece=piece_type['id'] + ): content = piece.uploaded_file.read() try: infos = { @@ -950,15 +1035,20 @@ class AbstractCartaDSCS(BaseResource): else: infos['docDaact'] = 'pieceDaact' infos['idPieceDaact'] = piece.id_piece.split('-', 1)[-1] - filename = '%s%s' % (piece_type['codePiece'], os.path.splitext(piece.uploaded_file.name)[-1]) - resp = client.service.UploadFile(FileByteStream=content, - _soapheaders={ - 'IdDossier': dossier.cartads_id_dossier, - 'NomFichier': filename, - 'Length': piece.uploaded_file.size, - 'Token': self.get_token(), - 'InformationsComplementaires': key_value_of_stringstring(infos) - }) + filename = '%s%s' % ( + piece_type['codePiece'], + os.path.splitext(piece.uploaded_file.name)[-1], + ) + resp = client.service.UploadFile( + FileByteStream=content, + _soapheaders={ + 'IdDossier': dossier.cartads_id_dossier, + 'NomFichier': filename, + 'Length': piece.uploaded_file.size, + 'Token': self.get_token(), + 'InformationsComplementaires': key_value_of_stringstring(infos), + }, + ) except zeep.exceptions.Fault as e: self.logger.exception('error pushing daact file item %d (%s)', 
piece.id, e) else: @@ -1005,82 +1095,88 @@ class AbstractCartaDSCS(BaseResource): response.update({'status_id': status_id, 'status_label': status_label}) return response - @endpoint(perm='can_access', - description=_('Get status of file'), - parameters={ - 'dossier_id': { - 'description': _('Identifier of file'), - } - }) + @endpoint( + perm='can_access', + description=_('Get status of file'), + parameters={ + 'dossier_id': { + 'description': _('Identifier of file'), + } + }, + ) def status(self, request, dossier_id): dossier = CartaDSDossier.objects.get(id=dossier_id) return self.get_file_status(dossier) - @endpoint(perm='can_access', - description=_('Get list of files attached to user'), - parameters={ - 'name_id': { - 'description': _('UUID of requester'), - 'example_value': '3eb56fc' - }, - 'status': { - 'description': _('File Status'), - 'example_value': 'Attente DOC', - }, - }) + @endpoint( + perm='can_access', + description=_('Get list of files attached to user'), + parameters={ + 'name_id': {'description': _('UUID of requester'), 'example_value': '3eb56fc'}, + 'status': { + 'description': _('File Status'), + 'example_value': 'Attente DOC', + }, + }, + ) def files(self, request, name_id, status=None): - files = CartaDSDossier.objects.filter( - subscribers__name_id__in=[name_id]) + files = CartaDSDossier.objects.filter(subscribers__name_id__in=[name_id]) if status: - files = [x for x in files if - self.get_file_status(x).get('status_id') == status or - self.get_file_status(x).get('status_label') == status] + files = [ + x + for x in files + if self.get_file_status(x).get('status_id') == status + or self.get_file_status(x).get('status_label') == status + ] def get_date(dossier): if dossier.cartads_cache_infos and dossier.cartads_cache_infos['DateDepot']: return dossier.cartads_cache_infos['DateDepot'] return '' + files = list(files) files.sort(key=get_date) - return {'data': [ - {'id': str(x.id), - 'text': x.cartads_numero_dossier, - 'tracking_code': 
x.tracking_code, - 'status': self.get_file_status(x), - 'commune_label': self.get_commune_label(x.commune_id), - 'type_dossier_label': self.get_type_dossier_label(x.commune_id, x.type_dossier_id), - 'formdata_url': x.formdata_url, - 'cartads_infos': x.cartads_cache_infos, - } for x in files] + return { + 'data': [ + { + 'id': str(x.id), + 'text': x.cartads_numero_dossier, + 'tracking_code': x.tracking_code, + 'status': self.get_file_status(x), + 'commune_label': self.get_commune_label(x.commune_id), + 'type_dossier_label': self.get_type_dossier_label(x.commune_id, x.type_dossier_id), + 'formdata_url': x.formdata_url, + 'cartads_infos': x.cartads_cache_infos, + } + for x in files + ] } - @endpoint(perm='can_access', - description=_('Join dossier'), - parameters={ - 'name_id': { - 'description': _('UUID of requester'), - 'example_value': '3eb56fc' - }, - 'dossier_number': { - 'description': _('Dossier Number'), - 'example_value': 'PC 069 012 23 45678', - }, - 'dossier_password': { - 'description': _('Dossier Password'), - 'example_value': '5A3E36FE-80D3-45E5-9323-7415E04D3B14', - }, - 'formdata_url': { - 'description': _('URL of user form'), - }, - }) + @endpoint( + perm='can_access', + description=_('Join dossier'), + parameters={ + 'name_id': {'description': _('UUID of requester'), 'example_value': '3eb56fc'}, + 'dossier_number': { + 'description': _('Dossier Number'), + 'example_value': 'PC 069 012 23 45678', + }, + 'dossier_password': { + 'description': _('Dossier Password'), + 'example_value': '5A3E36FE-80D3-45E5-9323-7415E04D3B14', + }, + 'formdata_url': { + 'description': _('URL of user form'), + }, + }, + ) def join(self, request, name_id, dossier_number, dossier_password, formdata_url=None): client = self.soap_client(wsdl_url=self.get_wsdl_url('ServiceSuiviNumerique')) try: resp = client.service.ActiverServiceSuiviNumerique( - self.get_token(), - dossier_number, - dossier_password) + self.get_token(), dossier_number, dossier_password + ) except 
zeep.exceptions.Fault as e: self.logger.error('error joining dossier %s (%s)', dossier_number, e) raise APIError('error joining dossier (wrong password?)') @@ -1089,9 +1185,7 @@ class AbstractCartaDSCS(BaseResource): if created: dossier.cartads_numero_dossier = dossier_number client_dossier = self.soap_client(wsdl_url=self.get_wsdl_url('ServiceRechercheDossier')) - infos = client_dossier.service.GetInfosDossier( - self.client_name, - id_dossier) + infos = client_dossier.service.GetInfosDossier(self.client_name, id_dossier) dossier.type_dossier_id = infos['CoTypeDossier'] dossier.commune_id = self.get_commune_id(infos['Commune']) dossier.formdata_url = formdata_url @@ -1102,24 +1196,24 @@ class AbstractCartaDSCS(BaseResource): dossier.subscribers.add(CartaDSSubscriber.objects.get_or_create(name_id=name_id)[0]) self.sync_subscribers_role(dossier) - return {'err': 0, - 'dossier_id': dossier.id, - 'formdata_url': dossier.formdata_url, - 'tracking_code': dossier.tracking_code, - } + return { + 'err': 0, + 'dossier_id': dossier.id, + 'formdata_url': dossier.formdata_url, + 'tracking_code': dossier.tracking_code, + } - @endpoint(perm='can_access', - description=_('Unsubscribe from dossier'), - parameters={ - 'name_id': { - 'description': _('UUID of requester'), - 'example_value': '3eb56fc' - }, - 'dossier_number': { - 'description': _('Dossier Number'), - 'example_value': 'PC 069 012 23 45678', - }, - }) + @endpoint( + perm='can_access', + description=_('Unsubscribe from dossier'), + parameters={ + 'name_id': {'description': _('UUID of requester'), 'example_value': '3eb56fc'}, + 'dossier_number': { + 'description': _('Dossier Number'), + 'example_value': 'PC 069 012 23 45678', + }, + }, + ) def unsubscribe(self, request, name_id, dossier_number): try: dossier = CartaDSDossier.objects.get(cartads_numero_dossier=dossier_number) @@ -1142,14 +1236,18 @@ class AbstractCartaDSCS(BaseResource): # sync subscribers with an authentic role, this can fail and it will # be retried 
again later. role_api_url = sign_url( - urlparse.urljoin( - idp_service['url'], - 'api/roles/?get_or_create=slug&orig=%s' % idp_service.get('orig')), - key=idp_service.get('secret')) - response = self.requests.post(role_api_url, - json={'name': 'Suivi Cart@DS (%s)' % dossier.id, - 'slug': '_cartads_%s' % dossier.id, - }) + urlparse.urljoin( + idp_service['url'], 'api/roles/?get_or_create=slug&orig=%s' % idp_service.get('orig') + ), + key=idp_service.get('secret'), + ) + response = self.requests.post( + role_api_url, + json={ + 'name': 'Suivi Cart@DS (%s)' % dossier.id, + 'slug': '_cartads_%s' % dossier.id, + }, + ) if response.status_code != 200: return try: @@ -1157,19 +1255,19 @@ class AbstractCartaDSCS(BaseResource): except (KeyError, TypeError, ValueError): return role_api_url = sign_url( - urlparse.urljoin( - idp_service['url'], - 'api/roles/%s/relationships/members/?orig=%s' % ( - role_uuid, - idp_service.get('orig') - )), - key=idp_service.get('secret')) - response = self.requests.patch(role_api_url, - json={'data': [{'uuid': x.name_id} for x in dossier.subscribers.all()]} - ) + urlparse.urljoin( + idp_service['url'], + 'api/roles/%s/relationships/members/?orig=%s' % (role_uuid, idp_service.get('orig')), + ), + key=idp_service.get('secret'), + ) + response = self.requests.patch( + role_api_url, json={'data': [{'uuid': x.name_id} for x in dossier.subscribers.all()]} + ) class CartaDSCS(AbstractCartaDSCS): category = _('Misc') + class Meta: verbose_name = 'Cart@DS CS' diff --git a/passerelle/apps/choosit/migrations/0001_initial.py b/passerelle/apps/choosit/migrations/0001_initial.py index bd93ca3f..4500cf92 100644 --- a/passerelle/apps/choosit/migrations/0001_initial.py +++ b/passerelle/apps/choosit/migrations/0001_initial.py @@ -14,7 +14,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ChoositRegisterGateway', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + 
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), @@ -30,7 +33,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ChoositRegisterNewsletter', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('name', models.CharField(max_length=16)), ('description', models.CharField(max_length=128, blank=True)), ], @@ -43,13 +49,27 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ChoositSMSGateway', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), ('key', models.CharField(max_length=64, verbose_name='Key')), - ('default_country_code', models.CharField(default='33', max_length=3, verbose_name='Default country code')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_choositsmsgateway_users_+', related_query_name='+', blank=True)), + ( + 'default_country_code', + models.CharField(default='33', max_length=3, verbose_name='Default country code'), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_choositsmsgateway_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'db_table': 'sms_choosit', diff --git a/passerelle/apps/choosit/migrations/0002_auto_20160316_0528.py 
b/passerelle/apps/choosit/migrations/0002_auto_20160316_0528.py index 88b334cf..fa6dc52a 100644 --- a/passerelle/apps/choosit/migrations/0002_auto_20160316_0528.py +++ b/passerelle/apps/choosit/migrations/0002_auto_20160316_0528.py @@ -14,13 +14,25 @@ class Migration(migrations.Migration): migrations.AddField( model_name='choositregistergateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), migrations.AddField( model_name='choositsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/choosit/migrations/0003_auto_20160316_0910.py b/passerelle/apps/choosit/migrations/0003_auto_20160316_0910.py index f752552c..8b0a1bf9 100644 --- a/passerelle/apps/choosit/migrations/0003_auto_20160316_0910.py +++ b/passerelle/apps/choosit/migrations/0003_auto_20160316_0910.py @@ -14,13 +14,23 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='choositregistergateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), migrations.AlterField( model_name='choositsmsgateway', name='log_level', - 
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/choosit/migrations/0004_auto_20160407_0456.py b/passerelle/apps/choosit/migrations/0004_auto_20160407_0456.py index a75ee065..7e125e1f 100644 --- a/passerelle/apps/choosit/migrations/0004_auto_20160407_0456.py +++ b/passerelle/apps/choosit/migrations/0004_auto_20160407_0456.py @@ -14,13 +14,37 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='choositregistergateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), migrations.AlterField( model_name='choositsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/choosit/models.py b/passerelle/apps/choosit/models.py index 
55b9bc4e..48fd1892 100644 --- a/passerelle/apps/choosit/models.py +++ b/passerelle/apps/choosit/models.py @@ -26,8 +26,8 @@ class ChoositSMSGateway(SMSResource): 'data': [ [u'0033688888888', u'Choosit error: bad JSON response'], [u'0033677777777', u'Choosit error: bad JSON response'], - ] - } + ], + }, }, { 'response': { @@ -40,7 +40,7 @@ class ChoositSMSGateway(SMSResource): [u'0033688888888', u'Choosit error: not ok'], [u'0033677777777', u'Choosit error: not ok'], ], - } + }, }, { 'response': { @@ -53,9 +53,8 @@ class ChoositSMSGateway(SMSResource): [u'0033688888888', {'result': u'Envoi terminé', 'sms_id': 1234}], [u'0033677777777', {'result': u'Envoi terminé', 'sms_id': 1234}], ], - } - } - + }, + }, ], } URL = 'http://sms.choosit.com/webservice' @@ -97,6 +96,5 @@ class ChoositSMSGateway(SMSResource): else: results.append(output) if any(isinstance(result, string_types) for result in results): - raise APIError('Choosit error: some destinations failed', - data=list(zip(destinations, results))) + raise APIError('Choosit error: some destinations failed', data=list(zip(destinations, results))) return list(zip(destinations, results)) diff --git a/passerelle/apps/cityweb/cityweb.py b/passerelle/apps/cityweb/cityweb.py index 00da9409..c63bd5ed 100644 --- a/passerelle/apps/cityweb/cityweb.py +++ b/passerelle/apps/cityweb/cityweb.py @@ -32,37 +32,30 @@ CERTIFICATE_TYPES = [ {"id": "NAI", "text": "Naissance"}, {"id": "MAR", "text": "Mariage"}, {"id": "REC", "text": "Reconnaissance"}, - {"id": "DEC", "text": "Décès"} + {"id": "DEC", "text": "Décès"}, ] -SEXES = [ - {"id": "M", "text": "Homme"}, - {"id": "F", "text": "Femme"}, - {"id": "NA", "text": "Autre"} -] +SEXES = [{"id": "M", "text": "Homme"}, {"id": "F", "text": "Femme"}, {"id": "NA", "text": "Autre"}] TITLES = [ {"id": "M", "text": "Monsieur"}, {"id": "Mme", "text": "Madame"}, - {"id": "Mlle", "text": "Mademoiselle"} + {"id": "Mlle", "text": "Mademoiselle"}, ] DOCUMENT_TYPES = [ {"id": "CPI", "text": "Copie 
intégrale"}, {"id": "EXTAF", "text": "Extrait avec filiation"}, {"id": "EXTSF", "text": "Extrait sans filiation"}, - {"id": "EXTPL", "text": "Extrait plurilingue"} + {"id": "EXTPL", "text": "Extrait plurilingue"}, ] -CONCERNED = [ - {"id": "reconnu", "text": "Reconnu"}, - {"id": "auteur", "text": "Auteur"} -] +CONCERNED = [{"id": "reconnu", "text": "Reconnu"}, {"id": "auteur", "text": "Auteur"}] ORIGINS = [ {"id": "internet", "text": "Internet"}, {"id": "guichet", "text": "Guichet"}, - {"id": "courrier", "text": "Courrier"} + {"id": "courrier", "text": "Courrier"}, ] @@ -73,8 +66,8 @@ def is_clean(element): class BaseType(object): - """Base data binding object - """ + """Base data binding object""" + tagname = None def __repr__(self): @@ -82,8 +75,7 @@ class BaseType(object): @classmethod def make_element(cls, tagname, value=None, namespace=None, nsmap=None): - M = xobject.ElementMaker(annotate=False, namespace=namespace, - nsmap=nsmap) + M = xobject.ElementMaker(annotate=False, namespace=namespace, nsmap=nsmap) return M(tagname, value) @property @@ -115,19 +107,17 @@ class CityWebType(BaseType): class SimpleType(CityWebType): - """Data binding class for SimpleType - """ + """Data binding class for SimpleType""" + allowed_values = None def __init__(self, value): if value not in self.allowed_values: - raise APIError('<%s> value (%s) not in %s' % (self.tagname, value, - self.allowed_values)) + raise APIError('<%s> value (%s) not in %s' % (self.tagname, value, self.allowed_values)) self.value = value class DateType(CityWebType): - def __init__(self, value): try: self.value = parse_date(value) @@ -139,8 +129,8 @@ class DateType(CityWebType): class ComplexType(CityWebType): - """Data binding class for ComplexType - """ + """Data binding class for ComplexType""" + sequence = None pattern = None @@ -229,8 +219,7 @@ class Place(ComplexType): class Address(ComplexType): tagname = 'adresse' - sequence = ('ligneAdr1', 'ligneAdr2', 'codePostal', - 'lieu', 'mail', 'tel') + 
sequence = ('ligneAdr1', 'ligneAdr2', 'codePostal', 'lieu', 'mail', 'tel') pattern = 'address_' def __init__(self, data): @@ -273,8 +262,7 @@ class EventPlace(Place): class Person(ComplexType): - sequence = ('noms', 'prenoms', 'genre', 'adresse', 'sexe', - 'pere', 'mere', 'naissance') + sequence = ('noms', 'prenoms', 'genre', 'adresse', 'sexe', 'pere', 'mere', 'naissance') def __init__(self, data): super(Person, self).__init__(data) @@ -314,8 +302,7 @@ class Parent(Person): class ConcernedCommon(Person): - sequence = ('noms', 'prenoms', 'genre', 'sexe', - 'parent1', 'parent2', 'naissance') + sequence = ('noms', 'prenoms', 'genre', 'sexe', 'parent1', 'parent2', 'naissance') def __init__(self, data): super(ConcernedCommon, self).__init__(data) @@ -344,8 +331,7 @@ class Applicant(ComplexType): class Event(ComplexType): tagname = 'evenement' - sequence = ('interesse', 'conjoint', 'natureEvenement', - 'typeInteresse', 'dateEvenement', 'lieuEvenement') + sequence = ('interesse', 'conjoint', 'natureEvenement', 'typeInteresse', 'dateEvenement', 'lieuEvenement') def __init__(self, data): certificate_type = data['certificate_type'] @@ -362,8 +348,16 @@ class Event(ComplexType): class CivilStatusApplication(ComplexType): tagname = 'demandeEtatCivil' sequence = ( - 'identifiant', 'demandeur', 'natureDocument', 'nbExemplaire', - 'dateDemande', 'evenement', 'motif', 'origine', 'commentaire') + 'identifiant', + 'demandeur', + 'natureDocument', + 'nbExemplaire', + 'dateDemande', + 'evenement', + 'motif', + 'origine', + 'commentaire', + ) def __init__(self, data): self.identifiant = data['application_id'] @@ -388,5 +382,5 @@ class CivilStatusApplication(ComplexType): with atomic_write(filepath) as fd: fd.write(force_bytes(content)) # set read only permission for owner and group - os.chmod(filepath, stat.S_IRUSR|stat.S_IRGRP) + os.chmod(filepath, stat.S_IRUSR | stat.S_IRGRP) return filename diff --git a/passerelle/apps/cityweb/migrations/0001_initial.py 
b/passerelle/apps/cityweb/migrations/0001_initial.py index c3370869..7ce572a3 100644 --- a/passerelle/apps/cityweb/migrations/0001_initial.py +++ b/passerelle/apps/cityweb/migrations/0001_initial.py @@ -14,12 +14,35 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CityWeb', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_cityweb_users_+', related_query_name='+', blank=True)), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_cityweb_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': "CityWeb - Demande d'acte d'\xe9tat civil", diff --git a/passerelle/apps/cityweb/models.py b/passerelle/apps/cityweb/models.py index 879aeaf0..dc1dfd18 100644 --- a/passerelle/apps/cityweb/models.py +++ b/passerelle/apps/cityweb/models.py @@ -24,8 +24,15 @@ from passerelle.compat import json_loads from passerelle.utils.api import endpoint from passerelle.utils.jsonresponse import APIError -from .cityweb import (CivilStatusApplication, TITLES, SEXES, 
DOCUMENT_TYPES, - CERTIFICATE_TYPES, CONCERNED, ORIGINS) +from .cityweb import ( + CivilStatusApplication, + TITLES, + SEXES, + DOCUMENT_TYPES, + CERTIFICATE_TYPES, + CONCERNED, + ORIGINS, +) class CityWeb(BaseResource): @@ -52,8 +59,7 @@ class CityWeb(BaseResource): @property def basepath(self): - return os.path.join( - default_storage.path('cityweb'), self.slug) + return os.path.join(default_storage.path('cityweb'), self.slug) @endpoint(perm='can_access', description=_('Get title list')) def titles(self, request): @@ -71,14 +77,20 @@ class CityWeb(BaseResource): def origins(self, request): return {'data': ORIGINS} - @endpoint(name='certificate-types', perm='can_access', - description=_('Get certificate type list'), parameters={'exclude': {'example_value': 'REC'}}) + @endpoint( + name='certificate-types', + perm='can_access', + description=_('Get certificate type list'), + parameters={'exclude': {'example_value': 'REC'}}, + ) def certificate_types(self, request, exclude=''): - return {'data': [item for item in CERTIFICATE_TYPES - if item.get('id') not in exclude.split(',')]} + return {'data': [item for item in CERTIFICATE_TYPES if item.get('id') not in exclude.split(',')]} - @endpoint(name='document-types', perm='can_access', - description=_('Get document type list'), parameters={'exclude': {'example_value': 'EXTPL'}}) + @endpoint( + name='document-types', + perm='can_access', + description=_('Get document type list'), + parameters={'exclude': {'example_value': 'EXTPL'}}, + ) def document_types(self, request, exclude=''): - return {'data': [item for item in DOCUMENT_TYPES - if item.get('id') not in exclude.split(',')]} + return {'data': [item for item in DOCUMENT_TYPES if item.get('id') not in exclude.split(',')]} diff --git a/passerelle/apps/clicrdv/migrations/0001_initial.py b/passerelle/apps/clicrdv/migrations/0001_initial.py index 31f37e78..8d89150a 100644 --- a/passerelle/apps/clicrdv/migrations/0001_initial.py +++ 
b/passerelle/apps/clicrdv/migrations/0001_initial.py @@ -6,8 +6,6 @@ from django.db import models, migrations class Migration(migrations.Migration): - dependencies = [ - ] + dependencies = [] - operations = [ - ] + operations = [] diff --git a/passerelle/apps/clicrdv/migrations/0001_squashed_0006_auto_20170920_0951.py b/passerelle/apps/clicrdv/migrations/0001_squashed_0006_auto_20170920_0951.py index 12347687..9eee6235 100644 --- a/passerelle/apps/clicrdv/migrations/0001_squashed_0006_auto_20170920_0951.py +++ b/passerelle/apps/clicrdv/migrations/0001_squashed_0006_auto_20170920_0951.py @@ -7,7 +7,14 @@ from django.db import migrations, models class Migration(migrations.Migration): - replaces = [('clicrdv', '0001_initial'), ('clicrdv', '0002_clicrdv_group_id'), ('clicrdv', '0003_auto_20160920_0903'), ('clicrdv', '0004_newclicrdv'), ('clicrdv', '0005_auto_20161218_1701'), ('clicrdv', '0006_auto_20170920_0951')] + replaces = [ + ('clicrdv', '0001_initial'), + ('clicrdv', '0002_clicrdv_group_id'), + ('clicrdv', '0003_auto_20160920_0903'), + ('clicrdv', '0004_newclicrdv'), + ('clicrdv', '0005_auto_20161218_1701'), + ('clicrdv', '0006_auto_20170920_0951'), + ] initial = True @@ -20,19 +27,59 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ClicRdv', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(choices=[('NOTSET', 'NOTSET'), ('DEBUG', 'DEBUG'), ('INFO', 'INFO'), ('WARNING', 'WARNING'), ('ERROR', 'ERROR'), ('CRITICAL', 'CRITICAL')], default='INFO', max_length=10, verbose_name='Log Level')), - ('server', models.CharField(choices=[('www.clicrdv.com', 
'Production (www.clicrdv.com)'), ('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)')], default='sandbox.clicrdv.com', max_length=64, verbose_name='Server')), + ( + 'log_level', + models.CharField( + choices=[ + ('NOTSET', 'NOTSET'), + ('DEBUG', 'DEBUG'), + ('INFO', 'INFO'), + ('WARNING', 'WARNING'), + ('ERROR', 'ERROR'), + ('CRITICAL', 'CRITICAL'), + ], + default='INFO', + max_length=10, + verbose_name='Log Level', + ), + ), + ( + 'server', + models.CharField( + choices=[ + ('www.clicrdv.com', 'Production (www.clicrdv.com)'), + ('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)'), + ], + default='sandbox.clicrdv.com', + max_length=64, + verbose_name='Server', + ), + ), ('group_id', models.IntegerField(default=0, verbose_name='Group Id')), ('apikey', models.CharField(max_length=64, verbose_name='API Key')), ('username', models.CharField(max_length=64, verbose_name='Username')), ('password', models.CharField(max_length=64, verbose_name='Password')), - ('websource', models.CharField(blank=True, max_length=64, null=True, verbose_name='Web source')), - ('default_comment', models.CharField(blank=True, max_length=250, null=True, verbose_name='Default comment')), - ('users', models.ManyToManyField(blank=True, related_name='_clicrdv_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'websource', + models.CharField(blank=True, max_length=64, null=True, verbose_name='Web source'), + ), + ( + 'default_comment', + models.CharField(blank=True, max_length=250, null=True, verbose_name='Default comment'), + ), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_clicrdv_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'Clicrdv Agenda', diff --git a/passerelle/apps/clicrdv/migrations/0002_clicrdv_group_id.py b/passerelle/apps/clicrdv/migrations/0002_clicrdv_group_id.py index c9f15624..ebe847a1 100644 --- a/passerelle/apps/clicrdv/migrations/0002_clicrdv_group_id.py +++ 
b/passerelle/apps/clicrdv/migrations/0002_clicrdv_group_id.py @@ -10,5 +10,4 @@ class Migration(migrations.Migration): ('clicrdv', '0001_initial'), ] - operations = [ - ] + operations = [] diff --git a/passerelle/apps/clicrdv/migrations/0002_remove_clicrdv_log_level.py b/passerelle/apps/clicrdv/migrations/0002_remove_clicrdv_log_level.py index 3d7cf1bc..b27ef2db 100644 --- a/passerelle/apps/clicrdv/migrations/0002_remove_clicrdv_log_level.py +++ b/passerelle/apps/clicrdv/migrations/0002_remove_clicrdv_log_level.py @@ -7,13 +7,13 @@ from django.db import migrations class Migration(migrations.Migration): - dependencies = [ + dependencies = [ ('clicrdv', '0001_squashed_0006_auto_20170920_0951'), - ] + ] - operations = [ + operations = [ migrations.RemoveField( model_name='clicrdv', name='log_level', ), - ] + ] diff --git a/passerelle/apps/clicrdv/migrations/0003_auto_20160920_0903.py b/passerelle/apps/clicrdv/migrations/0003_auto_20160920_0903.py index 95f131ab..d371eb3d 100644 --- a/passerelle/apps/clicrdv/migrations/0003_auto_20160920_0903.py +++ b/passerelle/apps/clicrdv/migrations/0003_auto_20160920_0903.py @@ -10,5 +10,4 @@ class Migration(migrations.Migration): ('clicrdv', '0002_clicrdv_group_id'), ] - operations = [ - ] + operations = [] diff --git a/passerelle/apps/clicrdv/migrations/0004_newclicrdv.py b/passerelle/apps/clicrdv/migrations/0004_newclicrdv.py index a82ec2d7..16e564d7 100644 --- a/passerelle/apps/clicrdv/migrations/0004_newclicrdv.py +++ b/passerelle/apps/clicrdv/migrations/0004_newclicrdv.py @@ -3,6 +3,7 @@ from __future__ import unicode_literals from django.db import migrations, models + class Migration(migrations.Migration): dependencies = [ @@ -15,19 +16,56 @@ class Migration(migrations.Migration): migrations.CreateModel( name='NewClicRdv', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, 
primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default='NOTSET', max_length=10, verbose_name='Log Level', choices=[('NOTSET', 'NOTSET'), ('DEBUG', 'DEBUG'), ('INFO', 'INFO'), ('WARNING', 'WARNING'), ('ERROR', 'ERROR'), ('CRITICAL', 'CRITICAL'), ('FATAL', 'FATAL')])), - ('server', models.CharField(default='sandbox.clicrdv.com', max_length=64, choices=[('www.clicrdv.com', 'Production (www.clicrdv.com)'), ('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)')])), + ( + 'log_level', + models.CharField( + default='NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + ('NOTSET', 'NOTSET'), + ('DEBUG', 'DEBUG'), + ('INFO', 'INFO'), + ('WARNING', 'WARNING'), + ('ERROR', 'ERROR'), + ('CRITICAL', 'CRITICAL'), + ('FATAL', 'FATAL'), + ], + ), + ), + ( + 'server', + models.CharField( + default='sandbox.clicrdv.com', + max_length=64, + choices=[ + ('www.clicrdv.com', 'Production (www.clicrdv.com)'), + ('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)'), + ], + ), + ), ('group_id', models.IntegerField(default=0)), ('apikey', models.CharField(max_length=64)), ('username', models.CharField(max_length=64)), ('password', models.CharField(max_length=64)), ('websource', models.CharField(max_length=64, null=True, blank=True)), ('default_comment', models.CharField(max_length=250, null=True, blank=True)), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_newclicrdv_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_newclicrdv_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Clicrdv Agenda', diff --git a/passerelle/apps/clicrdv/models.py b/passerelle/apps/clicrdv/models.py index 5870c99b..0c3dba4a 100644 --- a/passerelle/apps/clicrdv/models.py 
+++ b/passerelle/apps/clicrdv/models.py @@ -22,15 +22,14 @@ from passerelle.utils.api import endpoint CLICRDV_SERVERS = ( ('www.clicrdv.com', 'Production (www.clicrdv.com)'), - ('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)') + ('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)'), ) + class ClicRdv(BaseResource): server = models.CharField( - _('Server'), - max_length=64, - choices=CLICRDV_SERVERS, - default='sandbox.clicrdv.com') + _('Server'), max_length=64, choices=CLICRDV_SERVERS, default='sandbox.clicrdv.com' + ) group_id = models.IntegerField(_('Group Id'), default=0) apikey = models.CharField(_('API Key'), max_length=64) username = models.CharField(_('Username'), max_length=64) @@ -120,8 +119,7 @@ class ClicRdv(BaseResource): datetimes = [] for timeslot in self.get_available_timeslots(intervention): parsed = datetime.datetime.strptime(timeslot, '%Y-%m-%d %H:%M:%S') - datetimed = {'id': parsed.strftime('%Y-%m-%d-%H:%M:%S'), - 'text': date_format(parsed, 'j F Y H:i')} + datetimed = {'id': parsed.strftime('%Y-%m-%d-%H:%M:%S'), 'text': date_format(parsed, 'j F Y H:i')} datetimes.append(datetimed) datetimes.sort(key=lambda x: x.get('id')) return datetimes @@ -130,8 +128,7 @@ class ClicRdv(BaseResource): dates = [] for timeslot in self.get_available_timeslots(intervention): parsed = datetime.datetime.strptime(timeslot, '%Y-%m-%d %H:%M:%S') - date = {'id': parsed.strftime('%Y-%m-%d'), - 'text': date_format(parsed, 'j F Y')} + date = {'id': parsed.strftime('%Y-%m-%d'), 'text': date_format(parsed, 'j F Y')} if date in dates: continue dates.append(date) @@ -142,12 +139,11 @@ class ClicRdv(BaseResource): if not date: raise Exception('no date value') times = [] - for timeslot in self.get_available_timeslots(intervention, - date_start='%s 00:00:00' % date, - date_end='%s 23:59:59' % date): + for timeslot in self.get_available_timeslots( + intervention, date_start='%s 00:00:00' % date, date_end='%s 23:59:59' % date + ): parsed = 
datetime.datetime.strptime(timeslot, '%Y-%m-%d %H:%M:%S') - timed = {'id': parsed.strftime('%H:%M:%S'), - 'text': time_format(parsed, 'H:i')} + timed = {'id': parsed.strftime('%H:%M:%S'), 'text': time_format(parsed, 'H:i')} times.append(timed) times.sort(key=lambda x: x.get('id')) return times @@ -158,12 +154,13 @@ class ClicRdv(BaseResource): return response return {'success': True} - def create_appointment(self, intervention, websource, data): fields = data.get('fields') or {} extra = data.get('extra') or {} + def get_data(key, default=None): return data.get(key) or extra.get(key) or fields.get(key) or default + if intervention: intervention = int(intervention) else: @@ -184,17 +181,17 @@ class ClicRdv(BaseResource): 'email': get_data('clicrdv_email', ''), 'firstphone': get_data('clicrdv_firstphone', ''), 'secondphone': get_data('clicrdv_secondphone', ''), - }, - 'date': date, - 'intervention_ids': [intervention], - 'websource': websource, + }, + 'date': date, + 'intervention_ids': [intervention], + 'websource': websource, }, } comments = get_data('clicrdv_comments') or self.default_comment if comments: appointment['comments'] = comments # optional parameters, if any... 
- for fieldname in (list(fields.keys()) + list(extra.keys()) + list(data.keys())): + for fieldname in list(fields.keys()) + list(extra.keys()) + list(data.keys()): if fieldname.startswith('clicrdv_fiche_'): appointment['appointment']['fiche'][fieldname[14:]] = get_data(fieldname) or '' response = self.request('appointments', 'post', json=appointment) diff --git a/passerelle/apps/clicrdv/urls.py b/passerelle/apps/clicrdv/urls.py index 0542c0b2..ed2273eb 100644 --- a/passerelle/apps/clicrdv/urls.py +++ b/passerelle/apps/clicrdv/urls.py @@ -7,17 +7,34 @@ from passerelle.apps.clicrdv.views import * urlpatterns = [ url(r'^(?P[\w,-]+)/$', ClicRdvDetailView.as_view(), name='clicrdv-view'), - - url(r'^(?P[\w,-]+)/interventions/(?P\d+)/datetimes/$', - DateTimesView.as_view(), name='clicrdv-datetimes'), - url(r'^(?P[\w,-]+)/interventions/(?P\d+)/dates/$', - DatesView.as_view(), name='clicrdv-dates'), - url(r'^(?P[\w,-]+)/interventions/(?P\d+)/(?P[\d-]+)/times$', - TimesView.as_view(), name='clicrdv-times'), - url(r'^(?P[\w,-]+)/interventions/(?P\d+)/create$', - csrf_exempt(CreateAppointmentView.as_view()), name='clicrdv-create-appointment'), - url(r'^(?P[\w,-]+)/create$', - csrf_exempt(CreateAppointmentView.as_view()), name='clicrdv-create-appointment-qs'), - url(r'^(?P[\w,-]+)/(?P\d+)/cancel$', - CancelAppointmentView.as_view(), name='clicrdv-cancel-appointment'), + url( + r'^(?P[\w,-]+)/interventions/(?P\d+)/datetimes/$', + DateTimesView.as_view(), + name='clicrdv-datetimes', + ), + url( + r'^(?P[\w,-]+)/interventions/(?P\d+)/dates/$', + DatesView.as_view(), + name='clicrdv-dates', + ), + url( + r'^(?P[\w,-]+)/interventions/(?P\d+)/(?P[\d-]+)/times$', + TimesView.as_view(), + name='clicrdv-times', + ), + url( + r'^(?P[\w,-]+)/interventions/(?P\d+)/create$', + csrf_exempt(CreateAppointmentView.as_view()), + name='clicrdv-create-appointment', + ), + url( + r'^(?P[\w,-]+)/create$', + csrf_exempt(CreateAppointmentView.as_view()), + name='clicrdv-create-appointment-qs', + ), + 
url( + r'^(?P[\w,-]+)/(?P\d+)/cancel$', + CancelAppointmentView.as_view(), + name='clicrdv-cancel-appointment', + ), ] diff --git a/passerelle/apps/clicrdv/views.py b/passerelle/apps/clicrdv/views.py index 4d67d9e3..3c9f1e68 100644 --- a/passerelle/apps/clicrdv/views.py +++ b/passerelle/apps/clicrdv/views.py @@ -16,6 +16,7 @@ class DateTimesView(View, SingleObjectMixin): input: https//passerelle/clicrdv/foobar/interventions/887/datetimes """ + model = ClicRdv @utils.to_json() @@ -31,6 +32,7 @@ class DatesView(View, SingleObjectMixin): { data: [ { id: '2014-05-07', text: "7 mai 2014" }, { id: '2014-05-13', text: "13 mai 2014" } ], err: 0 } """ + model = ClicRdv @utils.to_json() @@ -46,6 +48,7 @@ class TimesView(View, SingleObjectMixin): { data: [ { id: '15:10:00', text: "15:10" }, { id: '15:30:00', text: "15:30" } ], err: 0 } """ + model = ClicRdv @utils.to_json() @@ -77,6 +80,7 @@ class CreateAppointmentView(View, SingleObjectMixin): output: { data: { 'success': true, 'appointment_id': 123 }, err: 0 } """ + model = ClicRdv @utils.protected_api('can_manage_appointment') @@ -85,10 +89,11 @@ class CreateAppointmentView(View, SingleObjectMixin): if intervention_id is None: intervention_id = self.request.GET.get('intervention') data = json_loads(request.body) - return {'data': self.get_object().create_appointment( - intervention_id, - self.request.GET.get('websource'), - data)} + return { + 'data': self.get_object().create_appointment( + intervention_id, self.request.GET.get('websource'), data + ) + } class CancelAppointmentView(View, SingleObjectMixin): @@ -98,6 +103,7 @@ class CancelAppointmentView(View, SingleObjectMixin): output: { data: { 'success': true }, err: 0 } """ + model = ClicRdv @utils.protected_api('can_manage_appointment') diff --git a/passerelle/apps/cmis/migrations/0001_initial.py b/passerelle/apps/cmis/migrations/0001_initial.py index 7e6693e0..8518197e 100644 --- a/passerelle/apps/cmis/migrations/0001_initial.py +++ 
b/passerelle/apps/cmis/migrations/0001_initial.py @@ -14,15 +14,48 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CmisConnector', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('cmis_endpoint', models.URLField(help_text='URL of the CMIS Atom endpoint', max_length=400, verbose_name='CMIS Atom endpoint')), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'cmis_endpoint', + models.URLField( + help_text='URL of the CMIS Atom endpoint', + max_length=400, + verbose_name='CMIS Atom endpoint', + ), + ), ('username', models.CharField(max_length=128, verbose_name='Service username')), ('password', models.CharField(max_length=128, verbose_name='Service password')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_cmisconnector_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_cmisconnector_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'CMIS connector', diff --git a/passerelle/apps/cmis/models.py b/passerelle/apps/cmis/models.py index 8cbe3525..14c4f6f5 100644 --- 
a/passerelle/apps/cmis/models.py +++ b/passerelle/apps/cmis/models.py @@ -54,7 +54,7 @@ UPLOAD_SCHEMA = { 'content': {'type': 'string'}, 'content_type': {'type': 'string'}, }, - 'required': ['content'] + 'required': ['content'], }, 'filename': { 'type': 'string', @@ -65,10 +65,7 @@ UPLOAD_SCHEMA = { 'pattern': FILE_PATH_PATTERN, }, 'object_type': {'type': 'string'}, - 'properties': { - 'type': 'object', - 'additionalProperties': {'type': 'string'} - }, + 'properties': {'type': 'object', 'additionalProperties': {'type': 'string'}}, }, 'required': ['file', 'path'], 'unflatten': True, @@ -77,8 +74,8 @@ UPLOAD_SCHEMA = { class CmisConnector(BaseResource): cmis_endpoint = models.URLField( - max_length=400, verbose_name=_('CMIS Atom endpoint'), - help_text=_('URL of the CMIS Atom endpoint')) + max_length=400, verbose_name=_('CMIS Atom endpoint'), help_text=_('URL of the CMIS Atom endpoint') + ) username = models.CharField(max_length=128, verbose_name=_('Service username')) password = models.CharField(max_length=128, verbose_name=_('Service password')) category = _('Business Process Connectors') @@ -94,7 +91,8 @@ class CmisConnector(BaseResource): 'application/json': UPLOAD_SCHEMA, } } - }) + }, + ) def uploadfile(self, request, post_data): error, error_msg, data = self._validate_inputs(post_data) if error: @@ -114,7 +112,7 @@ class CmisConnector(BaseResource): return {'data': {'properties': doc.properties}} def _validate_inputs(self, data): - """ process dict + """process dict return a tuple (error, error_msg, data) """ file_ = data['file'] @@ -149,11 +147,11 @@ def wrap_cmis_error(f): raise APIError("invalid property name: %s" % e) except CmisException as e: raise APIError("cmis binding error: %s" % e) + return wrapper class CMISGateway(object): - def __init__(self, cmis_endpoint, username, password, logger): self._cmis_client = CmisClient(cmis_endpoint, username, password) self._logger = logger @@ -182,11 +180,13 @@ class CMISGateway(object): return folder 
@wrap_cmis_error - def create_doc(self, file_name, file_path, file_byte_content, - content_type=None, object_type=None, properties=None): + def create_doc( + self, file_name, file_path, file_byte_content, content_type=None, object_type=None, properties=None + ): folder = self._get_or_create_folder(file_path) properties = properties or {} if object_type: properties['cmis:objectTypeId'] = object_type - return folder.createDocument(file_name, contentFile=BytesIO(file_byte_content), - contentType=content_type, properties=properties) + return folder.createDocument( + file_name, contentFile=BytesIO(file_byte_content), contentType=content_type, properties=properties + ) diff --git a/passerelle/apps/cmis/urls.py b/passerelle/apps/cmis/urls.py index c4d81858..489d69d6 100644 --- a/passerelle/apps/cmis/urls.py +++ b/passerelle/apps/cmis/urls.py @@ -19,6 +19,5 @@ from django.conf.urls import include, url from .views import CmisTypeView management_urlpatterns = [ - url(r'^(?P<connector_slug>[\w,-]+)/type/$', - CmisTypeView.as_view(), name='cmis-type'), + url(r'^(?P<connector_slug>[\w,-]+)/type/$', CmisTypeView.as_view(), name='cmis-type'), ] diff --git a/passerelle/apps/cmis/views.py b/passerelle/apps/cmis/views.py index 83ccf0a8..6da98c17 100644 --- a/passerelle/apps/cmis/views.py +++ b/passerelle/apps/cmis/views.py @@ -30,8 +30,7 @@ class CmisTypeView(TemplateView): def get(self, request, *args, **kwargs): self.connector = CmisConnector.objects.get(slug=kwargs['connector_slug']) - client = CmisClient(self.connector.cmis_endpoint, self.connector.username, - self.connector.password) + client = CmisClient(self.connector.cmis_endpoint, self.connector.username, self.connector.password) self.repo = client.getDefaultRepository() type_id = request.GET.get('id') diff --git a/passerelle/apps/cryptor/migrations/0001_initial.py b/passerelle/apps/cryptor/migrations/0001_initial.py index 583eb5ab..49bf9989 100644 --- a/passerelle/apps/cryptor/migrations/0001_initial.py +++ 
b/passerelle/apps/cryptor/migrations/0001_initial.py @@ -20,7 +20,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CryptedFile', fields=[ - ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ( + 'uuid', + models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False), + ), ('filename', models.CharField(max_length=512)), ('content_type', models.CharField(max_length=128)), ('creation_timestamp', models.DateTimeField(auto_now_add=True)), @@ -29,14 +32,44 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Cryptor', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('public_key', models.TextField(blank=True, validators=[passerelle.apps.cryptor.models.validate_rsa_key], verbose_name='Encryption RSA public key (PEM format)')), - ('private_key', models.TextField(blank=True, validators=[passerelle.apps.cryptor.models.validate_rsa_key], verbose_name='Decryption RSA private key (PEM format)')), - ('redirect_url_base', models.URLField(blank=True, help_text='Base URL for redirect, empty for local', max_length=256, verbose_name='Base URL of decryption system')), - ('users', models.ManyToManyField(blank=True, related_name='_cryptor_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'public_key', + models.TextField( + blank=True, + validators=[passerelle.apps.cryptor.models.validate_rsa_key], + verbose_name='Encryption RSA public key (PEM format)', + ), + ), + ( + 'private_key', + models.TextField( + blank=True, + validators=[passerelle.apps.cryptor.models.validate_rsa_key], + 
verbose_name='Decryption RSA private key (PEM format)', + ), + ), + ( + 'redirect_url_base', + models.URLField( + blank=True, + help_text='Base URL for redirect, empty for local', + max_length=256, + verbose_name='Base URL of decryption system', + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_cryptor_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'Encryption / Decryption', diff --git a/passerelle/apps/cryptor/models.py b/passerelle/apps/cryptor/models.py index b543d7ab..4877c377 100644 --- a/passerelle/apps/cryptor/models.py +++ b/passerelle/apps/cryptor/models.py @@ -52,15 +52,16 @@ FILE_SCHEMA = { "filename": {"type": "string"}, "content_type": {"type": "string"}, "content": {"type": "string"}, - } + }, } - } + }, } # encrypt and decrypt are borrowed from # https://www.pycryptodome.org/en/latest/src/examples.html#encrypt-data-with-rsa + def write_encrypt(out_file, data, key_pem): public_key = RSA.import_key(key_pem) session_key = get_random_bytes(16) @@ -115,15 +116,18 @@ def validate_rsa_key(key): class Cryptor(BaseResource): - public_key = models.TextField(blank=True, - verbose_name=_('Encryption RSA public key (PEM format)'), - validators=[validate_rsa_key]) - private_key = models.TextField(blank=True, - verbose_name=_('Decryption RSA private key (PEM format)'), - validators=[validate_rsa_key]) - redirect_url_base = models.URLField(max_length=256, blank=True, - verbose_name=_('Base URL of decryption system'), - help_text=_('Base URL for redirect, empty for local')) + public_key = models.TextField( + blank=True, verbose_name=_('Encryption RSA public key (PEM format)'), validators=[validate_rsa_key] + ) + private_key = models.TextField( + blank=True, verbose_name=_('Decryption RSA private key (PEM format)'), validators=[validate_rsa_key] + ) + redirect_url_base = models.URLField( + max_length=256, + blank=True, + verbose_name=_('Base URL of decryption system'), + help_text=_('Base URL 
for redirect, empty for local'), + ) category = _('Misc') @@ -136,20 +140,23 @@ class Cryptor(BaseResource): return _('this file-decrypt endpoint') def get_filename(self, uuid, create=False): - dirname = os.path.join(default_storage.path(self.get_connector_slug()), - self.slug, uuid[0:2], uuid[2:4]) + dirname = os.path.join( + default_storage.path(self.get_connector_slug()), self.slug, uuid[0:2], uuid[2:4] + ) if create: makedir(dirname) filename = os.path.join(dirname, uuid) return filename - - @endpoint(name='file-encrypt', perm='can_encrypt', - description=_('Encrypt a file'), - post={ - 'description': _('File to encrypt'), - 'request_body': {'schema': {'application/json': FILE_SCHEMA}} - }) + @endpoint( + name='file-encrypt', + perm='can_encrypt', + description=_('Encrypt a file'), + post={ + 'description': _('File to encrypt'), + 'request_body': {'schema': {'application/json': FILE_SCHEMA}}, + }, + ) def file_encrypt(self, request, post_data): if not self.public_key: raise APIError('missing public key') @@ -168,8 +175,7 @@ class Cryptor(BaseResource): if self.redirect_url_base: redirect_url_base = self.redirect_url_base else: - redirect_url_base = request.build_absolute_uri('%sfile-decrypt/' % ( - self.get_absolute_url(),)) + redirect_url_base = request.build_absolute_uri('%sfile-decrypt/' % (self.get_absolute_url(),)) redirect_url = urljoin(redirect_url_base, uuid) content_filename = self.get_filename(uuid, create=True) @@ -189,16 +195,19 @@ class Cryptor(BaseResource): return {'data': metadata} - @endpoint(name='file-decrypt', perm='can_decrypt', - description=_('Decrypt a file'), - pattern=r'(?P<uuid>[\w-]+)$', - example_pattern='{uuid}/', - parameters={ - 'uuid': { - 'description': _('File identifier'), - 'example_value': '12345678-abcd-4321-abcd-123456789012', - }, - }) + @endpoint( + name='file-decrypt', + perm='can_decrypt', + description=_('Decrypt a file'), + pattern=r'(?P<uuid>[\w-]+)$', + example_pattern='{uuid}/', + parameters={ + 'uuid': { + 'description': 
_('File identifier'), + 'example_value': '12345678-abcd-4321-abcd-123456789012', + }, + }, + ) def file_decrypt(self, request, uuid): if not self.private_key: raise APIError('missing private key') diff --git a/passerelle/apps/csvdatasource/__init__.py b/passerelle/apps/csvdatasource/__init__.py index 87b3b6ad..43e955d4 100644 --- a/passerelle/apps/csvdatasource/__init__.py +++ b/passerelle/apps/csvdatasource/__init__.py @@ -16,12 +16,15 @@ import django.apps + class AppConfig(django.apps.AppConfig): name = 'passerelle.apps.csvdatasource' label = 'csvdatasource' def get_connector_model(self): from . import models + return models.CsvDataSource + default_app_config = 'passerelle.apps.csvdatasource.AppConfig' diff --git a/passerelle/apps/csvdatasource/forms.py b/passerelle/apps/csvdatasource/forms.py index a98dbe95..f61fb055 100644 --- a/passerelle/apps/csvdatasource/forms.py +++ b/passerelle/apps/csvdatasource/forms.py @@ -41,25 +41,38 @@ class QueryForm(forms.ModelForm): if named: line = line.split(':', 1) if len(line) != 2: - errors.append(ValidationError( - _('Syntax error line %d: each line must be prefixed ' - 'with an identifier followed by a colon.') % (i + 1))) + errors.append( + ValidationError( + _( + 'Syntax error line %d: each line must be prefixed ' + 'with an identifier followed by a colon.' + ) + % (i + 1) + ) + ) continue name, line = line if not identifier_re.match(name): errors.append( - ValidationError(_('Syntax error line %d: invalid identifier, ' - 'it must starts with a letter and only ' - 'contains letters, digits and _.') % (i + 1))) + ValidationError( + _( + 'Syntax error line %d: invalid identifier, ' + 'it must starts with a letter and only ' + 'contains letters, digits and _.' 
+ ) + % (i + 1) + ) + ) continue try: get_code(line) except SyntaxError as e: - errors.append(ValidationError( - _('Syntax error line %(line)d at character %(character)d') % { - 'line': i + 1, - 'character': e.offset - })) + errors.append( + ValidationError( + _('Syntax error line %(line)d at character %(character)d') + % {'line': i + 1, 'character': e.offset} + ) + ) if errors: raise ValidationError(errors) return lines diff --git a/passerelle/apps/csvdatasource/migrations/0001_initial.py b/passerelle/apps/csvdatasource/migrations/0001_initial.py index d89de06f..c25b605c 100644 --- a/passerelle/apps/csvdatasource/migrations/0001_initial.py +++ b/passerelle/apps/csvdatasource/migrations/0001_initial.py @@ -14,19 +14,41 @@ class Migration(migrations.Migration): migrations.CreateModel( name='CsvDataSource', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('csv_file', models.FileField(help_text='Supported file formats: csv, ods, xls, xlsx', - upload_to=b'csv', verbose_name='Spreadsheet file')), - ('columns_keynames', models.CharField(default=b'id, text', - help_text='ex: id,text,data1,data2', - max_length=256, - verbose_name='Column keynames', - blank=True)), + ( + 'csv_file', + models.FileField( + help_text='Supported file formats: csv, ods, xls, xlsx', + upload_to=b'csv', + verbose_name='Spreadsheet file', + ), + ), + ( + 'columns_keynames', + models.CharField( + default=b'id, text', + help_text='ex: id,text,data1,data2', + max_length=256, + verbose_name='Column keynames', + blank=True, + ), + ), ('skip_header', models.BooleanField(default=False, verbose_name='Skip first line')), - ('users', 
models.ManyToManyField(to='base.ApiUser', related_name='_csvdatasource_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_csvdatasource_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'CSV File', diff --git a/passerelle/apps/csvdatasource/migrations/0002_csvdatasource_log_level.py b/passerelle/apps/csvdatasource/migrations/0002_csvdatasource_log_level.py index 531c7a2f..c493127b 100644 --- a/passerelle/apps/csvdatasource/migrations/0002_csvdatasource_log_level.py +++ b/passerelle/apps/csvdatasource/migrations/0002_csvdatasource_log_level.py @@ -14,7 +14,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='csvdatasource', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/csvdatasource/migrations/0003_auto_20160316_0910.py b/passerelle/apps/csvdatasource/migrations/0003_auto_20160316_0910.py index d561803e..bd3311a3 100644 --- a/passerelle/apps/csvdatasource/migrations/0003_auto_20160316_0910.py +++ b/passerelle/apps/csvdatasource/migrations/0003_auto_20160316_0910.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='csvdatasource', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/csvdatasource/migrations/0004_auto_20160407_0456.py 
b/passerelle/apps/csvdatasource/migrations/0004_auto_20160407_0456.py index 18af3631..00fbc8f9 100644 --- a/passerelle/apps/csvdatasource/migrations/0004_auto_20160407_0456.py +++ b/passerelle/apps/csvdatasource/migrations/0004_auto_20160407_0456.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='csvdatasource', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/csvdatasource/migrations/0007_query.py b/passerelle/apps/csvdatasource/migrations/0007_query.py index 81f97118..e8b4be41 100644 --- a/passerelle/apps/csvdatasource/migrations/0007_query.py +++ b/passerelle/apps/csvdatasource/migrations/0007_query.py @@ -14,13 +14,54 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Query', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('slug', models.SlugField(verbose_name='Name')), - ('filters', models.TextField(help_text='List of filter clauses (Python expression)', verbose_name='Filters', blank=True)), - ('projections', models.TextField(help_text='List of projections (name:expression)', verbose_name='Projections', blank=True)), - ('order', models.TextField(help_text='Columns to use for sorting rows', verbose_name='Sort Order', blank=True)), - ('distinct', models.TextField(help_text='Distinct columns', 
verbose_name='Distinct', blank=True)), - ('structure', models.CharField(choices=[(b'array', 'Array'), (b'dict', 'Dictionary'), (b'keyed-distinct', 'Keyed Dictionary'), (b'tuples', 'Tuples'), (b'onerow', 'Single Row'), (b'one', 'Single Value')], default=b'dict', help_text='Data structure used for the response', max_length=20, verbose_name='Structure')), + ( + 'filters', + models.TextField( + help_text='List of filter clauses (Python expression)', + verbose_name='Filters', + blank=True, + ), + ), + ( + 'projections', + models.TextField( + help_text='List of projections (name:expression)', + verbose_name='Projections', + blank=True, + ), + ), + ( + 'order', + models.TextField( + help_text='Columns to use for sorting rows', verbose_name='Sort Order', blank=True + ), + ), + ( + 'distinct', + models.TextField(help_text='Distinct columns', verbose_name='Distinct', blank=True), + ), + ( + 'structure', + models.CharField( + choices=[ + (b'array', 'Array'), + (b'dict', 'Dictionary'), + (b'keyed-distinct', 'Keyed Dictionary'), + (b'tuples', 'Tuples'), + (b'onerow', 'Single Row'), + (b'one', 'Single Value'), + ], + default=b'dict', + help_text='Data structure used for the response', + max_length=20, + verbose_name='Structure', + ), + ), ('resource', models.ForeignKey(to='csvdatasource.CsvDataSource', on_delete=models.CASCADE)), ], options={ diff --git a/passerelle/apps/csvdatasource/migrations/0011_auto_20180905_0936.py b/passerelle/apps/csvdatasource/migrations/0011_auto_20180905_0936.py index 9dc7d1f7..baaf98a5 100644 --- a/passerelle/apps/csvdatasource/migrations/0011_auto_20180905_0936.py +++ b/passerelle/apps/csvdatasource/migrations/0011_auto_20180905_0936.py @@ -17,7 +17,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='TableRow', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + 
), ('line_number', models.IntegerField()), ('data', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict)), ], @@ -28,6 +31,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='tablerow', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='csvdatasource.CsvDataSource'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='csvdatasource.CsvDataSource' + ), ), ] diff --git a/passerelle/apps/csvdatasource/migrations/0014_query_set_slug.py b/passerelle/apps/csvdatasource/migrations/0014_query_set_slug.py index cd03965a..d1e45cc2 100644 --- a/passerelle/apps/csvdatasource/migrations/0014_query_set_slug.py +++ b/passerelle/apps/csvdatasource/migrations/0014_query_set_slug.py @@ -8,7 +8,9 @@ def generate_slug(instance): slug = instance.slug i = 1 while True: - queryset = instance._meta.model.objects.filter(slug=slug, resource=instance.resource).exclude(pk=instance.pk) + queryset = instance._meta.model.objects.filter(slug=slug, resource=instance.resource).exclude( + pk=instance.pk + ) if not queryset.exists(): break slug = '%s-%s' % (instance.slug, i) diff --git a/passerelle/apps/csvdatasource/migrations/0016_auto_20200406_1702.py b/passerelle/apps/csvdatasource/migrations/0016_auto_20200406_1702.py index b68f9b1e..6e2a67f3 100644 --- a/passerelle/apps/csvdatasource/migrations/0016_auto_20200406_1702.py +++ b/passerelle/apps/csvdatasource/migrations/0016_auto_20200406_1702.py @@ -16,21 +16,48 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='csvdatasource', name='columns_keynames', - field=models.CharField(blank=True, default='id, text', help_text='ex: id,text,data1,data2', max_length=256, verbose_name='Column keynames'), + field=models.CharField( + blank=True, + default='id, text', + help_text='ex: id,text,data1,data2', + max_length=256, + verbose_name='Column keynames', + ), ), migrations.AlterField( model_name='csvdatasource', 
name='csv_file', - field=models.FileField(help_text='Supported file formats: csv, ods, xls, xlsx', upload_to='csv', verbose_name='Spreadsheet file'), + field=models.FileField( + help_text='Supported file formats: csv, ods, xls, xlsx', + upload_to='csv', + verbose_name='Spreadsheet file', + ), ), migrations.AlterField( model_name='query', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='queries', to='csvdatasource.CsvDataSource'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='queries', + to='csvdatasource.CsvDataSource', + ), ), migrations.AlterField( model_name='query', name='structure', - field=models.CharField(choices=[('array', 'Array'), ('dict', 'Dictionary'), ('keyed-distinct', 'Keyed Dictionary'), ('tuples', 'Tuples'), ('onerow', 'Single Row'), ('one', 'Single Value')], default='dict', help_text='Data structure used for the response', max_length=20, verbose_name='Structure'), + field=models.CharField( + choices=[ + ('array', 'Array'), + ('dict', 'Dictionary'), + ('keyed-distinct', 'Keyed Dictionary'), + ('tuples', 'Tuples'), + ('onerow', 'Single Row'), + ('one', 'Single Value'), + ], + default='dict', + help_text='Data structure used for the response', + max_length=20, + verbose_name='Structure', + ), ), ] diff --git a/passerelle/apps/csvdatasource/migrations/0019_csv_upload_to.py b/passerelle/apps/csvdatasource/migrations/0019_csv_upload_to.py index ad9a9010..fa25a86d 100644 --- a/passerelle/apps/csvdatasource/migrations/0019_csv_upload_to.py +++ b/passerelle/apps/csvdatasource/migrations/0019_csv_upload_to.py @@ -15,6 +15,10 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='csvdatasource', name='csv_file', - field=models.FileField(help_text='Supported file formats: csv, ods, xls, xlsx', upload_to=passerelle.apps.csvdatasource.models.upload_to, verbose_name='Spreadsheet file'), + field=models.FileField( + help_text='Supported file 
formats: csv, ods, xls, xlsx', + upload_to=passerelle.apps.csvdatasource.models.upload_to, + verbose_name='Spreadsheet file', + ), ), ] diff --git a/passerelle/apps/csvdatasource/models.py b/passerelle/apps/csvdatasource/models.py index d8b88af9..d48b2269 100644 --- a/passerelle/apps/csvdatasource/models.py +++ b/passerelle/apps/csvdatasource/models.py @@ -52,7 +52,7 @@ code_cache = OrderedDict() def get_code(expr): # limit size of code cache to 1024 if len(code_cache) > 1024: - for key in list(code_cache.keys())[:len(code_cache) - 1024]: + for key in list(code_cache.keys())[: len(code_cache) - 1024]: code_cache.pop(key) if expr not in code_cache: code_cache[expr] = compile(expr, '', 'eval') @@ -65,21 +65,13 @@ class Query(models.Model): label = models.CharField(_('Label'), max_length=100) description = models.TextField(_('Description'), blank=True) filters = models.TextField( - _('Filters'), - blank=True, - help_text=_('List of filter clauses (Python expression)')) - order = models.TextField( - _('Sort Order'), - blank=True, - help_text=_('Columns to use for sorting rows')) - distinct = models.TextField( - _('Distinct'), - blank=True, - help_text=_('Distinct columns')) + _('Filters'), blank=True, help_text=_('List of filter clauses (Python expression)') + ) + order = models.TextField(_('Sort Order'), blank=True, help_text=_('Columns to use for sorting rows')) + distinct = models.TextField(_('Distinct'), blank=True, help_text=_('Distinct columns')) projections = models.TextField( - _('Projections'), - blank=True, - help_text=_('List of projections (name:expression)')) + _('Projections'), blank=True, help_text=_('List of projections (name:expression)') + ) structure = models.CharField( _('Structure'), max_length=20, @@ -89,9 +81,11 @@ class Query(models.Model): ('keyed-distinct', _('Keyed Dictionary')), ('tuples', _('Tuples')), ('onerow', _('Single Row')), - ('one', _('Single Value'))], + ('one', _('Single Value')), + ], default='dict', - help_text=_('Data structure 
used for the response')) + help_text=_('Data structure used for the response'), + ) class Meta: ordering = ['slug'] @@ -123,12 +117,10 @@ class Query(models.Model): return self.slug def delete_url(self): - return reverse('csv-delete-query', - kwargs={'connector_slug': self.resource.slug, 'pk': self.pk}) + return reverse('csv-delete-query', kwargs={'connector_slug': self.resource.slug, 'pk': self.pk}) def edit_url(self): - return reverse('csv-edit-query', - kwargs={'connector_slug': self.resource.slug, 'pk': self.pk}) + return reverse('csv-edit-query', kwargs={'connector_slug': self.resource.slug, 'pk': self.pk}) def upload_to(instance, filename): @@ -137,20 +129,23 @@ def upload_to(instance, filename): class CsvDataSource(BaseResource): csv_file = models.FileField( - _('Spreadsheet file'), - upload_to=upload_to, - help_text=_('Supported file formats: csv, ods, xls, xlsx')) + _('Spreadsheet file'), upload_to=upload_to, help_text=_('Supported file formats: csv, ods, xls, xlsx') + ) columns_keynames = models.CharField( max_length=256, verbose_name=_('Column keynames'), default='id, text', - help_text=_('ex: id,text,data1,data2'), blank=True) + help_text=_('ex: id,text,data1,data2'), + blank=True, + ) skip_header = models.BooleanField(_('Skip first line'), default=False) _dialect_options = JSONField(editable=False, null=True) sheet_name = models.CharField(_('Sheet name'), blank=True, max_length=150) category = _('Data Sources') - documentation_url = 'https://doc-publik.entrouvert.com/admin-fonctionnel/parametrage-avance/source-de-donnees-csv/' + documentation_url = ( + 'https://doc-publik.entrouvert.com/admin-fonctionnel/parametrage-avance/source-de-donnees-csv/' + ) class Meta: verbose_name = _('Spreadsheet File') @@ -173,9 +168,7 @@ class CsvDataSource(BaseResource): def _detect_dialect_options(self): content = self.get_content_without_bom() dialect = csv.Sniffer().sniff(content) - self.dialect_options = { - k: v for k, v in vars(dialect).items() if not 
k.startswith('_') - } + self.dialect_options = {k: v for k, v in vars(dialect).items() if not k.startswith('_')} def save(self, *args, **kwargs): cache = kwargs.pop('cache', True) @@ -193,7 +186,8 @@ class CsvDataSource(BaseResource): TableRow.objects.filter(resource=self).delete() for block in batch(enumerate(self.get_rows()), 5000): TableRow.objects.bulk_create( - TableRow(resource=self, line_number=i, data=data) for i, data in block) + TableRow(resource=self, line_number=i, data=data) for i, data in block + ) def csv_file_datetime(self): ctime = os.fstat(self.csv_file.fileno()).st_ctime @@ -205,8 +199,7 @@ class CsvDataSource(BaseResource): @property def dialect_options(self): - """turn dict items into string - """ + """turn dict items into string""" file_type = self.csv_file.name.split('.')[-1] if file_type in ('ods', 'xls', 'xlsx'): return None @@ -317,8 +310,7 @@ class CsvDataSource(BaseResource): query = Query(filters='\n'.join(filters)) return self.execute_query(request, query, query_params=params.dict()) - @endpoint(perm='can_access', methods=['get'], - name='query', pattern=r'^(?P<query_name>[\w-]+)/$') + @endpoint(perm='can_access', methods=['get'], name='query', pattern=r'^(?P<query_name>[\w-]+)/$') def select(self, request, query_name, **kwargs): try: query = Query.objects.get(resource=self.id, slug=query_name) @@ -371,8 +363,7 @@ class CsvDataSource(BaseResource): filters = query.get_list('filters') if filters: - data = [row for new_row, row in stream_expressions(filters, data, kind='filters') - if all(new_row)] + data = [row for new_row, row in stream_expressions(filters, data, kind='filters') if all(new_row)] order = query.get_list('order') if order: @@ -391,11 +382,13 @@ class CsvDataSource(BaseResource): try: hash(new_row) except TypeError: - raise APIError(u'distinct value is unhashable', - data={ - 'row': repr(row), - 'distinct': repr(new_row), - }) + raise APIError( + u'distinct value is unhashable', + data={ + 'row': repr(row), + 'distinct': repr(new_row), + }, + )
if new_row in seen: continue new_data.append(row) @@ -413,24 +406,21 @@ class CsvDataSource(BaseResource): titles.append(name) expressions.append(expr) new_data = [] - for new_row, row in stream_expressions(expressions, data, kind='projection', - titles=titles): + for new_row, row in stream_expressions(expressions, data, kind='projection', titles=titles): new_data.append(dict(zip(titles, new_row))) data = new_data if 'id' in request.GET: # always provide a ?id= filter. filters = ["id == %r" % force_text(request.GET['id'])] - data = [row for new_row, row in stream_expressions(filters, data, kind='filters') - if new_row[0]] + data = [row for new_row, row in stream_expressions(filters, data, kind='filters') if new_row[0]] # allow jsonp queries by select2 # filtering is done there after projection because we need a projection named text for # retro-compatibility with previous use of the csvdatasource with select2 if 'q' in request.GET: filters = ["%s in normalize(text.lower())" % repr(normalize(request.GET['q'].lower()))] - data = [row for new_row, row in stream_expressions(filters, data, kind='filters') - if new_row[0]] + data = [row for new_row, row in stream_expressions(filters, data, kind='filters') if new_row[0]] # force rendition of iterator as list data = list(data) @@ -450,7 +440,7 @@ class CsvDataSource(BaseResource): raise APIError('invalid offset parameter') # paginate data - data = data[offset:offset+limit] + data = data[offset : offset + limit] if query.structure == 'array': return {'data': [[row[t] for t in titles] for row in data]} @@ -539,4 +529,4 @@ class TableRow(models.Model): class Meta: ordering = ('line_number',) - unique_together = (('resource', 'line_number')) + unique_together = ('resource', 'line_number') diff --git a/passerelle/apps/csvdatasource/urls.py b/passerelle/apps/csvdatasource/urls.py index da90131a..503bceb7 100644 --- a/passerelle/apps/csvdatasource/urls.py +++ b/passerelle/apps/csvdatasource/urls.py @@ -19,12 +19,16 @@ from 
django.conf.urls import include, url from .views import * management_urlpatterns = [ - url(r'^(?P<connector_slug>[\w,-]+)/download/$', - CsvDownload.as_view(), name='csv-download'), - url(r'^(?P<connector_slug>[\w,-]+)/queries/new/$', - NewQueryView.as_view(), name='csv-new-query'), - url(r'^(?P<connector_slug>[\w,-]+)/queries/(?P<pk>[\w,-]+)/$', - UpdateQueryView.as_view(), name='csv-edit-query'), - url(r'^(?P<connector_slug>[\w,-]+)/queries/(?P<pk>[\w,-]+)/delete$', - DeleteQueryView.as_view(), name='csv-delete-query'), + url(r'^(?P<connector_slug>[\w,-]+)/download/$', CsvDownload.as_view(), name='csv-download'), + url(r'^(?P<connector_slug>[\w,-]+)/queries/new/$', NewQueryView.as_view(), name='csv-new-query'), + url( + r'^(?P<connector_slug>[\w,-]+)/queries/(?P<pk>[\w,-]+)/$', + UpdateQueryView.as_view(), + name='csv-edit-query', + ), + url( + r'^(?P<connector_slug>[\w,-]+)/queries/(?P<pk>[\w,-]+)/delete$', + DeleteQueryView.as_view(), + name='csv-delete-query', + ), ] diff --git a/passerelle/apps/family/loaders/concerto_fondettes.py b/passerelle/apps/family/loaders/concerto_fondettes.py index c07169dd..79ea69c1 100644 --- a/passerelle/apps/family/loaders/concerto_fondettes.py +++ b/passerelle/apps/family/loaders/concerto_fondettes.py @@ -29,9 +29,11 @@ from django.utils.translation import ugettext_lazy as _ from ..models import Invoice + def u(s): return force_text(s, 'iso-8859-15') + class Loader(object): def __init__(self, connector): self.connector = connector @@ -45,6 +47,7 @@ class Loader(object): fd = archive.open('data_full.csv') if six.PY3: import io + fd = io.TextIOWrapper(fd, 'iso-8859-15') csvfile = six.StringIO(fd.read()) csvreader = csv.reader(csvfile, delimiter='\t') @@ -59,9 +62,11 @@ class Loader(object): invoice['amount'] = str(Decimal(invoice['total_amount']) - paid_amount) invoice['paid'] = bool(Decimal(invoice['amount']) == 0) invoice['issue_date'] = datetime.datetime.strptime( - row['DAT_GENERATION_FAC'], '%d/%m/%Y').strftime('%Y-%m-%d') + row['DAT_GENERATION_FAC'], '%d/%m/%Y' + ).strftime('%Y-%m-%d') invoice['pay_limit_date'] = datetime.datetime.strptime( - 
row['DAT_LIMITEPAIE_FAC'], '%d/%m/%Y').strftime('%Y-%m-%d') + row['DAT_LIMITEPAIE_FAC'], '%d/%m/%Y' + ).strftime('%Y-%m-%d') invoice['online_payment'] = True invoice['no_online_payment_reason'] = None if not invoice['paid']: @@ -73,10 +78,12 @@ class Loader(object): invoice['online_payment'] = False invoice['no_online_payment_reason'] = 'autobilling' - obj, created = Invoice.objects.update_or_create(resource=self.connector, - external_id=row['ID_FAC'], defaults=invoice) + obj, created = Invoice.objects.update_or_create( + resource=self.connector, external_id=row['ID_FAC'], defaults=invoice + ) invoice_filename = '%s_%s.pdf' % ( - datetime.datetime.strptime(row['DAT_DEBUT_PGE'], '%d/%m/%Y').strftime('%Y-%m'), - row['ID_FAC']) + datetime.datetime.strptime(row['DAT_DEBUT_PGE'], '%d/%m/%Y').strftime('%Y-%m'), + row['ID_FAC'], + ) if invoice_filename in archive_files: obj.write_pdf(archive.read(invoice_filename)) diff --git a/passerelle/apps/family/loaders/concerto_orleans.py b/passerelle/apps/family/loaders/concerto_orleans.py index 3c0cda63..48a0dbb0 100644 --- a/passerelle/apps/family/loaders/concerto_orleans.py +++ b/passerelle/apps/family/loaders/concerto_orleans.py @@ -52,7 +52,9 @@ def normalize_adult(adult): def normalize_family(family, adults): return { 'external_id': family['id_fam'], - 'adults': [adults[family[id]] for id in ('id_per1', 'id_per2') if family[id] and adults.get(family[id])], + 'adults': [ + adults[family[id]] for id in ('id_per1', 'id_per2') if family[id] and adults.get(family[id]) + ], 'children': [], 'invoices': [], 'login': family['id_fam'], @@ -65,6 +67,7 @@ def normalize_family(family, adults): 'city': family['lib_commune_adr'], } + def normalize_child(child): sex = child['typ_sexe_per'] if sex == 'G': @@ -74,27 +77,30 @@ def normalize_child(child): 'first_name': child['lib_prenom_per'], 'last_name': child['lib_nom_per'], 'sex': sex, - 'birthdate': get_date(child['dat_naissance']) + 'birthdate': get_date(child['dat_naissance']), } + def 
normalize_invoice(i): - invoice = {'external_id': i['id_fac'], - 'label': i['id_fac'], - 'total_amount': Decimal(i['mnt_facture_fac']), - 'amount': Decimal(i['mnt_solde_fac']), - 'issue_date': i['dat_generation_fac'], - 'pay_limit_date': get_date(i['dat_limitepaie_fac']), - 'autobilling': i['on_prelevauto_ins'] == 'O', - 'online_payment': True, - 'payment_date': get_datetime(i['dat_reglement']), - 'litigation_date': get_date(i['dat_perception_fac']), - 'paid': Decimal(i['mnt_solde_fac']) == 0 + invoice = { + 'external_id': i['id_fac'], + 'label': i['id_fac'], + 'total_amount': Decimal(i['mnt_facture_fac']), + 'amount': Decimal(i['mnt_solde_fac']), + 'issue_date': i['dat_generation_fac'], + 'pay_limit_date': get_date(i['dat_limitepaie_fac']), + 'autobilling': i['on_prelevauto_ins'] == 'O', + 'online_payment': True, + 'payment_date': get_datetime(i['dat_reglement']), + 'litigation_date': get_date(i['dat_perception_fac']), + 'paid': Decimal(i['mnt_solde_fac']) == 0, } return invoice class Dialect(csv.Dialect): '''Because sometimes it cannot be sniffed by csv.Sniffer''' + delimiter = ';' doublequote = False escapechar = None @@ -104,13 +110,16 @@ class Dialect(csv.Dialect): class Loader(object): - def __init__(self, connector): self.connector = connector def clean(self, archive): - for filename in ('extract_prcit_personne.csv', 'extract_prcit_famille.csv', - 'extract_prcit_enfant.csv', 'extract_prcit_facture.csv'): + for filename in ( + 'extract_prcit_personne.csv', + 'extract_prcit_famille.csv', + 'extract_prcit_enfant.csv', + 'extract_prcit_facture.csv', + ): if not filename in archive.namelist(): raise ValidationError(_('Missing %(filename)s file in zip.') % {'filename': filename}) @@ -118,6 +127,7 @@ class Loader(object): fd = self.archive.open(filename) if six.PY3: import io + fd = io.TextIOWrapper(fd, 'iso-8859-15') reader = csv.reader(fd, Dialect) @@ -144,7 +154,6 @@ class Loader(object): families[invoice['id_fam']]['invoices'].append(normalize_invoice(invoice)) 
return families - def load(self, archive): self.archive = archive @@ -157,20 +166,32 @@ class Loader(object): import_start_timestamp = timezone.now() try: for family_data in families.values(): - data = dict_cherry_pick(family_data, - ('login', 'password', 'family_quotient', - 'zipcode', 'street_number', 'street_name', - 'address_complement', 'city')) - family, created = Family.objects.update_or_create(external_id=family_data['external_id'], - resource=self.connector, defaults=data) + data = dict_cherry_pick( + family_data, + ( + 'login', + 'password', + 'family_quotient', + 'zipcode', + 'street_number', + 'street_name', + 'address_complement', + 'city', + ), + ) + family, created = Family.objects.update_or_create( + external_id=family_data['external_id'], resource=self.connector, defaults=data + ) for adult_data in family_data.get('adults') or []: - Adult.objects.update_or_create(family=family, - external_id=adult_data['external_id'], defaults=adult_data) + Adult.objects.update_or_create( + family=family, external_id=adult_data['external_id'], defaults=adult_data + ) for child_data in family_data.get('children') or []: - Child.objects.get_or_create(family=family, - external_id=child_data['external_id'], defaults=child_data) + Child.objects.get_or_create( + family=family, external_id=child_data['external_id'], defaults=child_data + ) for invoice_data in family_data.get('invoices') or []: storage = DefaultStorage() @@ -179,17 +200,27 @@ class Loader(object): invoice_path = os.path.join(invoices_dir, invoice_filename) # create invoice object only if associated pdf exists if os.path.exists(invoice_path): - invoice, created = Invoice.objects.update_or_create(resource=self.connector, - family=family, external_id=invoice_data['external_id'], defaults=invoice_data) + invoice, created = Invoice.objects.update_or_create( + resource=self.connector, + family=family, + external_id=invoice_data['external_id'], + defaults=invoice_data, + ) except Exception as e: 
self.connector.logger.error('Error occured while importing data: %s', e) Family.objects.filter(resource=self.connector, update_timestamp__lte=import_start_timestamp).delete() - Adult.objects.filter(family__resource=self.connector, update_timestamp__lte=import_start_timestamp).delete() - Child.objects.filter(family__resource=self.connector, update_timestamp__lte=import_start_timestamp).delete() + Adult.objects.filter( + family__resource=self.connector, update_timestamp__lte=import_start_timestamp + ).delete() + Child.objects.filter( + family__resource=self.connector, update_timestamp__lte=import_start_timestamp + ).delete() # remove obsolete invoices and their pdfs - for invoice in Invoice.objects.filter(resource=self.connector, update_timestamp__lte=import_start_timestamp): + for invoice in Invoice.objects.filter( + resource=self.connector, update_timestamp__lte=import_start_timestamp + ): if invoice.has_pdf: os.unlink(invoice.pdf_filename()) invoice.delete() diff --git a/passerelle/apps/family/loaders/egee_thonon.py b/passerelle/apps/family/loaders/egee_thonon.py index 66fb5abd..e7a3cf20 100644 --- a/passerelle/apps/family/loaders/egee_thonon.py +++ b/passerelle/apps/family/loaders/egee_thonon.py @@ -49,7 +49,8 @@ class Loader: 'no_online_payment_reason': None, 'label': external_id, } - obj, created = Invoice.objects.update_or_create(resource=self.connector, - external_id=external_id, defaults=invoice) + obj, created = Invoice.objects.update_or_create( + resource=self.connector, external_id=external_id, defaults=invoice + ) external_ids.append(external_id) Invoice.objects.filter(resource=self.connector).exclude(external_id__in=external_ids).delete() diff --git a/passerelle/apps/family/management/commands/import_orleans_data.py b/passerelle/apps/family/management/commands/import_orleans_data.py index e01d4541..eb89991e 100644 --- a/passerelle/apps/family/management/commands/import_orleans_data.py +++ 
b/passerelle/apps/family/management/commands/import_orleans_data.py @@ -28,13 +28,17 @@ from passerelle.apps.family.models import GenericFamily, Invoice class Command(BaseCommand): - def add_arguments(self, parser): - parser.add_argument('-a', '--archive-file', dest='archive_file', - help='Archive containing data files', - default='exports_prcit.zip') - parser.add_argument('-c', '--connector', dest='connector', - help='Slug of the connector to import data into') + parser.add_argument( + '-a', + '--archive-file', + dest='archive_file', + help='Archive containing data files', + default='exports_prcit.zip', + ) + parser.add_argument( + '-c', '--connector', dest='connector', help='Slug of the connector to import data into' + ) def handle(self, *args, **options): @@ -50,7 +54,7 @@ class Command(BaseCommand): lock_filename = storage.path('family-%s/import-orleans-data.lock' % connector.id) try: fd = open(lock_filename, 'w') - fcntl.lockf(fd, fcntl.LOCK_EX|fcntl.LOCK_NB) + fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError: raise CommandError('Command already running.') diff --git a/passerelle/apps/family/migrations/0001_initial.py b/passerelle/apps/family/migrations/0001_initial.py index 84b3e637..d7fc97fc 100644 --- a/passerelle/apps/family/migrations/0001_initial.py +++ b/passerelle/apps/family/migrations/0001_initial.py @@ -14,17 +14,31 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Adult', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('external_id', models.CharField(max_length=32, verbose_name="Person's external id", db_index=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), + ( + 'external_id', + models.CharField(max_length=32, verbose_name="Person's external id", db_index=True), + ), ('first_name', models.CharField(max_length=64, verbose_name='First name')), ('last_name', 
models.CharField(max_length=64, verbose_name='Last name')), - ('sex', models.CharField(max_length=1, verbose_name='Sex', choices=[(b'M', 'Male'), (b'F', 'Female')])), + ( + 'sex', + models.CharField( + max_length=1, verbose_name='Sex', choices=[(b'M', 'Male'), (b'F', 'Female')] + ), + ), ('birthdate', models.DateField(null=True, verbose_name='Birthdate', blank=True)), ('phone', models.CharField(max_length=32, null=True, verbose_name='Phone')), ('cellphone', models.CharField(max_length=32, null=True, verbose_name='Cellphone')), ('street_number', models.CharField(max_length=32, null=True, verbose_name='Street number')), ('street_name', models.CharField(max_length=128, null=True, verbose_name='Street name')), - ('address_complement', models.CharField(max_length=64, null=True, verbose_name='Address complement')), + ( + 'address_complement', + models.CharField(max_length=64, null=True, verbose_name='Address complement'), + ), ('zipcode', models.CharField(max_length=16, null=True, verbose_name='Zipcode')), ('city', models.CharField(max_length=64, null=True, verbose_name='City')), ('country', models.CharField(max_length=128, null=True, verbose_name='Country')), @@ -37,11 +51,22 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Child', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('external_id', models.CharField(max_length=32, verbose_name="Person's external id", db_index=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), + ( + 'external_id', + models.CharField(max_length=32, verbose_name="Person's external id", db_index=True), + ), ('first_name', models.CharField(max_length=64, verbose_name='First name')), ('last_name', models.CharField(max_length=64, verbose_name='Last name')), - ('sex', models.CharField(max_length=1, verbose_name='Sex', choices=[(b'M', 'Male'), (b'F', 'Female')])), + ( + 'sex', + models.CharField( + 
max_length=1, verbose_name='Sex', choices=[(b'M', 'Male'), (b'F', 'Female')] + ), + ), ('birthdate', models.DateField(null=True, verbose_name='Birthdate', blank=True)), ], options={ @@ -51,22 +76,36 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Family', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('external_id', models.CharField(max_length=16, verbose_name='External id', db_index=True)), ('login', models.CharField(max_length=64, null=True, verbose_name='Login')), ('password', models.CharField(max_length=64, null=True, verbose_name='Password')), ('street_number', models.CharField(max_length=32, null=True, verbose_name='Street number')), ('street_name', models.CharField(max_length=128, null=True, verbose_name='Street name')), - ('address_complement', models.CharField(max_length=64, null=True, verbose_name='Address complement')), + ( + 'address_complement', + models.CharField(max_length=64, null=True, verbose_name='Address complement'), + ), ('zipcode', models.CharField(max_length=16, null=True, verbose_name='Zipcode')), ('city', models.CharField(max_length=64, null=True, verbose_name='City')), - ('family_quotient', models.DecimalField(default=0, verbose_name='Family quotient', max_digits=10, decimal_places=2)), + ( + 'family_quotient', + models.DecimalField( + default=0, verbose_name='Family quotient', max_digits=10, decimal_places=2 + ), + ), ], ), migrations.CreateModel( name='FamilyLink', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('name_id', models.CharField(max_length=256)), ('family', models.ForeignKey(to='family.Family', on_delete=models.CASCADE)), ], @@ -74,13 +113,39 @@ class 
Migration(migrations.Migration): migrations.CreateModel( name='GenericFamily', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('archive', models.FileField(upload_to=b'archives', verbose_name='Data Archive')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_genericfamily_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_genericfamily_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Generic Family Connector', @@ -89,17 +154,31 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Invoice', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('external_id', models.CharField(max_length=128, verbose_name='External id', db_index=True)), ('label', models.CharField(max_length=128, null=True, verbose_name='Label')), ('issue_date', models.DateField(null=True, 
verbose_name='Issue date')), ('expiration_date', models.DateField(null=True, verbose_name='Expiration date')), ('litigation_date', models.DateField(null=True, verbose_name='Litigation date')), - ('total_amount', models.DecimalField(default=0, verbose_name='Total amount', max_digits=6, decimal_places=2)), - ('amount', models.DecimalField(default=0, verbose_name='Amount', max_digits=6, decimal_places=2)), + ( + 'total_amount', + models.DecimalField( + default=0, verbose_name='Total amount', max_digits=6, decimal_places=2 + ), + ), + ( + 'amount', + models.DecimalField(default=0, verbose_name='Amount', max_digits=6, decimal_places=2), + ), ('payment_date', models.DateTimeField(null=True, verbose_name='Payment date')), ('autobilling', models.BooleanField(default=False, verbose_name='Autobilling')), - ('payment_transaction_id', models.CharField(max_length=128, null=True, verbose_name='Payment transaction id')), + ( + 'payment_transaction_id', + models.CharField(max_length=128, null=True, verbose_name='Payment transaction id'), + ), ('family', models.ForeignKey(to='family.Family', on_delete=models.CASCADE)), ('resource', models.ForeignKey(to='family.GenericFamily', on_delete=models.CASCADE)), ], diff --git a/passerelle/apps/family/migrations/0003_auto_20161021_0333.py b/passerelle/apps/family/migrations/0003_auto_20161021_0333.py index a88902d1..3f6bf24b 100644 --- a/passerelle/apps/family/migrations/0003_auto_20161021_0333.py +++ b/passerelle/apps/family/migrations/0003_auto_20161021_0333.py @@ -14,7 +14,16 @@ class Migration(migrations.Migration): migrations.AddField( model_name='genericfamily', name='file_format', - field=models.CharField(default=b'native', max_length=40, verbose_name='File Format', choices=[(b'native', 'Native'), (b'concerto_fondettes', 'Concerto extract from Fondettes'), (b'concerto_orleans', 'Concerto extract from Orl\xe9ans')]), + field=models.CharField( + default=b'native', + max_length=40, + verbose_name='File Format', + choices=[ + (b'native', 
'Native'), + (b'concerto_fondettes', 'Concerto extract from Fondettes'), + (b'concerto_orleans', 'Concerto extract from Orl\xe9ans'), + ], + ), ), migrations.AlterField( model_name='invoice', diff --git a/passerelle/apps/family/migrations/0007_auto_20161122_1816.py b/passerelle/apps/family/migrations/0007_auto_20161122_1816.py index 0662ef20..5115709d 100644 --- a/passerelle/apps/family/migrations/0007_auto_20161122_1816.py +++ b/passerelle/apps/family/migrations/0007_auto_20161122_1816.py @@ -16,49 +16,65 @@ class Migration(migrations.Migration): migrations.AddField( model_name='adult', name='creation_timestamp', - field=models.DateTimeField(default=datetime.datetime(2016, 11, 22, 17, 15, 39, 968134, tzinfo=utc), auto_now_add=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 11, 22, 17, 15, 39, 968134, tzinfo=utc), auto_now_add=True + ), preserve_default=False, ), migrations.AddField( model_name='adult', name='update_timestamp', - field=models.DateTimeField(default=datetime.datetime(2016, 11, 22, 17, 15, 43, 378414, tzinfo=utc), auto_now=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 11, 22, 17, 15, 43, 378414, tzinfo=utc), auto_now=True + ), preserve_default=False, ), migrations.AddField( model_name='child', name='creation_timestamp', - field=models.DateTimeField(default=datetime.datetime(2016, 11, 22, 17, 15, 47, 471108, tzinfo=utc), auto_now_add=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 11, 22, 17, 15, 47, 471108, tzinfo=utc), auto_now_add=True + ), preserve_default=False, ), migrations.AddField( model_name='child', name='update_timestamp', - field=models.DateTimeField(default=datetime.datetime(2016, 11, 22, 17, 15, 52, 71574, tzinfo=utc), auto_now=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 11, 22, 17, 15, 52, 71574, tzinfo=utc), auto_now=True + ), preserve_default=False, ), migrations.AddField( model_name='family', name='creation_timestamp', - 
field=models.DateTimeField(default=datetime.datetime(2016, 11, 22, 17, 15, 55, 626407, tzinfo=utc), auto_now_add=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 11, 22, 17, 15, 55, 626407, tzinfo=utc), auto_now_add=True + ), preserve_default=False, ), migrations.AddField( model_name='family', name='update_timestamp', - field=models.DateTimeField(default=datetime.datetime(2016, 11, 22, 17, 16, 0, 117260, tzinfo=utc), auto_now=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 11, 22, 17, 16, 0, 117260, tzinfo=utc), auto_now=True + ), preserve_default=False, ), migrations.AddField( model_name='invoice', name='creation_timestamp', - field=models.DateTimeField(default=datetime.datetime(2016, 11, 22, 17, 16, 3, 925696, tzinfo=utc), auto_now_add=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 11, 22, 17, 16, 3, 925696, tzinfo=utc), auto_now_add=True + ), preserve_default=False, ), migrations.AddField( model_name='invoice', name='update_timestamp', - field=models.DateTimeField(default=datetime.datetime(2016, 11, 22, 17, 16, 7, 805483, tzinfo=utc), auto_now=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 11, 22, 17, 16, 7, 805483, tzinfo=utc), auto_now=True + ), preserve_default=False, ), ] diff --git a/passerelle/apps/family/migrations/0011_auto_20200803_2326.py b/passerelle/apps/family/migrations/0011_auto_20200803_2326.py index 4ec48103..dad72839 100644 --- a/passerelle/apps/family/migrations/0011_auto_20200803_2326.py +++ b/passerelle/apps/family/migrations/0011_auto_20200803_2326.py @@ -15,12 +15,16 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='adult', name='sex', - field=models.CharField(choices=[('M', 'Male'), ('F', 'Female')], max_length=1, verbose_name='Sex'), + field=models.CharField( + choices=[('M', 'Male'), ('F', 'Female')], max_length=1, verbose_name='Sex' + ), ), migrations.AlterField( model_name='child', name='sex', - 
field=models.CharField(choices=[('M', 'Male'), ('F', 'Female')], max_length=1, verbose_name='Sex'), + field=models.CharField( + choices=[('M', 'Male'), ('F', 'Female')], max_length=1, verbose_name='Sex' + ), ), migrations.AlterField( model_name='genericfamily', @@ -30,7 +34,16 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='genericfamily', name='file_format', - field=models.CharField(choices=[('native', 'Native'), ('concerto_fondettes', 'Concerto extract from Fondettes'), ('concerto_orleans', 'Concerto extract from Orléans')], default='native', max_length=40, verbose_name='File Format'), + field=models.CharField( + choices=[ + ('native', 'Native'), + ('concerto_fondettes', 'Concerto extract from Fondettes'), + ('concerto_orleans', 'Concerto extract from Orléans'), + ], + default='native', + max_length=40, + verbose_name='File Format', + ), ), migrations.AlterField( model_name='invoice', diff --git a/passerelle/apps/family/models.py b/passerelle/apps/family/models.py index 0df35b0e..9e3379d2 100644 --- a/passerelle/apps/family/models.py +++ b/passerelle/apps/family/models.py @@ -79,22 +79,32 @@ def format_address(data): def format_family(family): data = {'quotient': family.family_quotient} - for attr in ('street_name', 'street_number', 'address_complement', - 'zipcode', 'city', 'country'): + for attr in ('street_name', 'street_number', 'address_complement', 'zipcode', 'city', 'country'): data[attr] = getattr(family, attr, None) or '' data['address'] = format_address(data) return data def format_person(p): - data = {'id': str(p.id), - 'text': p.fullname, - 'first_name': p.first_name, - 'last_name': p.last_name, - 'birthdate': p.birthdate, - 'sex': p.sex} - for attr in ('phone', 'cellphone', 'street_name', 'street_number', 'address_complement', - 'zipcode', 'city', 'country', 'email'): + data = { + 'id': str(p.id), + 'text': p.fullname, + 'first_name': p.first_name, + 'last_name': p.last_name, + 'birthdate': p.birthdate, + 'sex': p.sex, + 
} + for attr in ( + 'phone', + 'cellphone', + 'street_name', + 'street_number', + 'address_complement', + 'zipcode', + 'city', + 'country', + 'email', + ): data[attr] = getattr(p, attr, None) or '' data['address'] = format_address(data) return data @@ -113,7 +123,7 @@ def format_invoice(i): 'paid': i.paid, 'online_payment': i.online_payment, 'no_online_payment_reason': i.no_online_payment_reason, - 'has_pdf': i.has_pdf + 'has_pdf': i.has_pdf, } if now().date() > i.pay_limit_date: invoice['online_payment'] = False @@ -142,14 +152,16 @@ class GenericFamily(BaseResource): category = _('Business Process Connectors') archive = models.FileField(_('Data Archive'), upload_to='archives', max_length=256) file_format = models.CharField( - _('File Format'), max_length=40, + _('File Format'), + max_length=40, choices=( ('native', _('Native')), ('concerto_fondettes', _('Concerto extract from Fondettes')), ('concerto_orleans', _(u'Concerto extract from Orléans')), ('egee_thonon', _(u'Egee Invoices from Thonon Agglomération')), ), - default='native') + default='native', + ) class Meta: verbose_name = _('Generic Family Connector') @@ -205,42 +217,46 @@ class GenericFamily(BaseResource): family_data.update(address) data = dict_cherry_pick( family_data, - ('login', - 'password', - 'family_quotient', - ('number', 'street_number'), - ('postal_code', 'zipcode'), - ('street', 'street_name'), - ('complement', 'address_complement'))) + ( + 'login', + 'password', + 'family_quotient', + ('number', 'street_number'), + ('postal_code', 'zipcode'), + ('street', 'street_name'), + ('complement', 'address_complement'), + ), + ) family, created = Family.objects.update_or_create( - external_id=family_data['id'], resource=self, defaults=data) + external_id=family_data['id'], resource=self, defaults=data + ) for adult in family_data.get('adults') or []: adults.append(adult['id']) adult_address = adult.get('address') or {} adult.update(adult_address) data = dict_cherry_pick( adult, - ('first_name', - 
'last_name', - 'phone', - ('mobile', 'cellphone'), - 'sex', - ('number', 'street_number'), - ('postal_code', 'zipcode'), - ('street', 'street_name'), - ('complement', 'address_complement'), - 'country')) - Adult.objects.update_or_create( - family=family, external_id=adult['id'], defaults=data) + ( + 'first_name', + 'last_name', + 'phone', + ('mobile', 'cellphone'), + 'sex', + ('number', 'street_number'), + ('postal_code', 'zipcode'), + ('street', 'street_name'), + ('complement', 'address_complement'), + 'country', + ), + ) + Adult.objects.update_or_create(family=family, external_id=adult['id'], defaults=data) # cleanup adults Adult.objects.exclude(external_id__in=adults).delete() for child in family_data.get('children') or []: children.append(child['id']) data = dict_cherry_pick(child, ('first_name', 'last_name', 'sex', 'birthdate')) - Child.objects.get_or_create(family=family, - external_id=child['id'], - defaults=data) + Child.objects.get_or_create(family=family, external_id=child['id'], defaults=data) # cleanup children Child.objects.exclude(external_id__in=children).delete() @@ -248,11 +264,17 @@ class GenericFamily(BaseResource): invoices.append(invoice['id']) data = dict_cherry_pick( invoice, - ('label', - ('created', 'issue_date'), - 'pay_limit_date', - 'litigation_date', 'total_amount', 'payment_date', - 'amount', 'autobilling')) + ( + 'label', + ('created', 'issue_date'), + 'pay_limit_date', + 'litigation_date', + 'total_amount', + 'payment_date', + 'amount', + 'autobilling', + ), + ) for date_attribute in data.keys(): if not date_attribute.endswith('_date'): continue @@ -261,10 +283,9 @@ class GenericFamily(BaseResource): else: data[date_attribute] = get_date(data[date_attribute]) data['paid'] = bool(data.get('payment_date')) - Invoice.objects.update_or_create(resource=self, - family=family, - external_id=invoice['id'], - defaults=data) + Invoice.objects.update_or_create( + resource=self, family=family, external_id=invoice['id'], defaults=data + ) if 
'invoices/%s.pdf' % invoice['id'] in archive_files: with open(os.path.join(invoices_dir, '%s.pdf' % invoice['id']), 'wb') as fp: fp.write(archive.read('invoices/%s.pdf' % invoice['id'])) @@ -363,24 +384,23 @@ class GenericFamily(BaseResource): except Invoice.DoesNotExist: return None - @endpoint(name='regie', perm='can_access', - pattern=r'^invoice/(?P\w+)/$') + @endpoint(name='regie', perm='can_access', pattern=r'^invoice/(?P\w+)/$') def get_invoice_details(self, request, invoice_id, NameID=None, email=None, **kwargs): invoice = self.get_invoice(invoice_id) if not invoice: return {'data': None} return {'data': format_invoice(invoice)} - @endpoint(name='regie', perm='can_access', - pattern=r'^invoice/(?P\w+)/pdf/$') + @endpoint(name='regie', perm='can_access', pattern=r'^invoice/(?P\w+)/pdf/$') def get_invoice_pdf(self, request, invoice_id, **kwargs): invoice = self.get_invoice(invoice_id) if not invoice: raise FileNotFoundError return invoice.get_pdf() - @endpoint(name='regie', methods=['post'], - perm='can_access', pattern=r'^invoice/(?P\w+)/pay/$') + @endpoint( + name='regie', methods=['post'], perm='can_access', pattern=r'^invoice/(?P\w+)/pay/$' + ) def pay_invoice(self, request, invoice_id, **kwargs): data = json_loads(request.body) invoice = self.get_invoice(invoice_id) @@ -397,12 +417,13 @@ class GenericFamily(BaseResource): @endpoint(name='regie', perm='can_access', pattern='^users/with-pending-invoices/$') def get_pending_invoices_by_nameid(self, request): data = defaultdict(lambda: {'invoices': []}) - for i in (Invoice.objects.filter( - payment_date__isnull=True, - family__resource=self, - family__familylink__isnull=False) - .select_related('family') - .prefetch_related('family__familylink_set')): + for i in ( + Invoice.objects.filter( + payment_date__isnull=True, family__resource=self, family__familylink__isnull=False + ) + .select_related('family') + .prefetch_related('family__familylink_set') + ): name_id = i.family.familylink_set.all()[0].name_id 
data[name_id]['invoices'].append(format_invoice(i)) return {'data': data} @@ -416,22 +437,18 @@ class FamilyLink(models.Model): class Family(models.Model): resource = models.ForeignKey('GenericFamily', on_delete=models.CASCADE) - external_id = models.CharField(_('External id'), - max_length=16, db_index=True) + external_id = models.CharField(_('External id'), max_length=16, db_index=True) login = models.CharField(_('Login'), max_length=64, null=True) password = models.CharField(_('Password'), max_length=64, null=True) street_number = models.CharField(_('Street number'), max_length=32, null=True) street_name = models.CharField(_('Street name'), max_length=128, null=True) - address_complement = models.CharField(_('Address complement'), - max_length=64, null=True) + address_complement = models.CharField(_('Address complement'), max_length=64, null=True) zipcode = models.CharField(_('Zipcode'), max_length=16, null=True) city = models.CharField(_('City'), max_length=64, null=True) - family_quotient = models.DecimalField(_('Family quotient'), max_digits=10, - decimal_places=2, default=0) + family_quotient = models.DecimalField(_('Family quotient'), max_digits=10, decimal_places=2, default=0) creation_timestamp = models.DateTimeField(auto_now_add=True) update_timestamp = models.DateTimeField(auto_now=True) - def get_display_id(self): return self.external_id @@ -439,8 +456,7 @@ class Family(models.Model): @six.python_2_unicode_compatible class Person(models.Model): family = models.ForeignKey('Family', on_delete=models.CASCADE) - external_id = models.CharField(_('Person\'s external id'), max_length=32, - db_index=True) + external_id = models.CharField(_('Person\'s external id'), max_length=32, db_index=True) first_name = models.CharField(_('First name'), max_length=64) last_name = models.CharField(_('Last name'), max_length=64) sex = models.CharField(_('Sex'), max_length=1, choices=SEXES) @@ -464,8 +480,7 @@ class Adult(Person): cellphone = models.CharField(_('Cellphone'), 
max_length=32, null=True) street_number = models.CharField(_('Street number'), max_length=32, null=True) street_name = models.CharField(_('Street name'), max_length=128, null=True) - address_complement = models.CharField(_('Address complement'), - max_length=64, null=True) + address_complement = models.CharField(_('Address complement'), max_length=64, null=True) zipcode = models.CharField(_('Zipcode'), max_length=16, null=True) city = models.CharField(_('City'), max_length=64, null=True) country = models.CharField(_('Country'), max_length=128, null=True) @@ -484,17 +499,14 @@ class Invoice(models.Model): issue_date = models.DateField(_('Issue date'), null=True) pay_limit_date = models.DateField(_('Due date'), null=True) litigation_date = models.DateField(_('Litigation date'), null=True) - total_amount = models.DecimalField(_('Total amount'), max_digits=8, - decimal_places=2, default=0) + total_amount = models.DecimalField(_('Total amount'), max_digits=8, decimal_places=2, default=0) amount = models.DecimalField(_('Amount'), max_digits=8, decimal_places=2, default=0) payment_date = models.DateTimeField(_('Payment date'), null=True) paid = models.BooleanField(_('Paid'), default=False) autobilling = models.BooleanField(_('Autobilling'), default=False) online_payment = models.BooleanField(_('Online payment'), default=True) - no_online_payment_reason = models.CharField(_('No online payment reason'), - max_length=100, null=True) - payment_transaction_id = models.CharField(_('Payment transaction id'), - max_length=128, null=True) + no_online_payment_reason = models.CharField(_('No online payment reason'), max_length=100, null=True) + payment_transaction_id = models.CharField(_('Payment transaction id'), max_length=128, null=True) creation_timestamp = models.DateTimeField(auto_now_add=True) update_timestamp = models.DateTimeField(auto_now=True) diff --git a/passerelle/apps/feeds/migrations/0001_initial.py b/passerelle/apps/feeds/migrations/0001_initial.py index 
e6438099..3d0ca18f 100644 --- a/passerelle/apps/feeds/migrations/0001_initial.py +++ b/passerelle/apps/feeds/migrations/0001_initial.py @@ -14,13 +14,36 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Feed', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('url', models.URLField(max_length=1000, verbose_name='URL')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_feed_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_feed_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'Feed', diff --git a/passerelle/apps/gdc/migrations/0001_initial.py b/passerelle/apps/gdc/migrations/0001_initial.py index 3657e114..8d710dc5 100644 --- a/passerelle/apps/gdc/migrations/0001_initial.py +++ b/passerelle/apps/gdc/migrations/0001_initial.py @@ -14,12 +14,25 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Gdc', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 
'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('service_url', models.CharField(help_text='GDC Web Service URL', max_length=128, verbose_name='Service URL')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_gdc_users_+', related_query_name='+', blank=True)), + ( + 'service_url', + models.CharField( + help_text='GDC Web Service URL', max_length=128, verbose_name='Service URL' + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_gdc_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'GDC Web Service', diff --git a/passerelle/apps/gdc/migrations/0002_gdc_log_level.py b/passerelle/apps/gdc/migrations/0002_gdc_log_level.py index 8b6a3082..6146d00c 100644 --- a/passerelle/apps/gdc/migrations/0002_gdc_log_level.py +++ b/passerelle/apps/gdc/migrations/0002_gdc_log_level.py @@ -14,7 +14,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='gdc', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/gdc/migrations/0003_auto_20160316_0910.py b/passerelle/apps/gdc/migrations/0003_auto_20160316_0910.py index a66b7e28..d5c5b82c 100644 --- a/passerelle/apps/gdc/migrations/0003_auto_20160316_0910.py +++ b/passerelle/apps/gdc/migrations/0003_auto_20160316_0910.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='gdc', name='log_level', - 
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/gdc/migrations/0004_auto_20160407_0456.py b/passerelle/apps/gdc/migrations/0004_auto_20160407_0456.py index 73eb1457..08913a01 100644 --- a/passerelle/apps/gdc/migrations/0004_auto_20160407_0456.py +++ b/passerelle/apps/gdc/migrations/0004_auto_20160407_0456.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='gdc', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/gdc/models.py b/passerelle/apps/gdc/models.py index 400162bc..167c4a1a 100644 --- a/passerelle/apps/gdc/models.py +++ b/passerelle/apps/gdc/models.py @@ -52,9 +52,9 @@ def phpserialize_loads(s): class Gdc(BaseResource): - service_url = models.CharField(max_length=128, blank=False, - verbose_name=_('Service URL'), - help_text=_('GDC Web Service URL')) + service_url = models.CharField( + max_length=128, blank=False, verbose_name=_('Service URL'), help_text=_('GDC Web Service URL') + ) category = _('Business Process Connectors') @@ -62,21 +62,23 @@ class Gdc(BaseResource): verbose_name = _('GDC Web Service') def call_soap(self, action, *args, **kwargs): - def escape(s): return force_text(s).replace('&', 
'&').replace('>', '>').replace('<', '<') params = [] for i, arg in enumerate(args): - params.append('%(value)s' % { - 'i': i + 1, 'value': escape(arg)}) + params.append( + '%(value)s' % {'i': i + 1, 'value': escape(arg)} + ) for key, value in kwargs.items(): if value is None: params.append('<%s xsi:null="1"/>' % key) continue type_ = 'int' if isinstance(value, int) else 'string' - params.append('<%(key)s xsi:type="xsd:%(type)s">%(value)s' % { - 'key': key, 'type': type_, 'value': escape(value)}) + params.append( + '<%(key)s xsi:type="xsd:%(type)s">%(value)s' + % {'key': key, 'type': type_, 'value': escape(value)} + ) data = """ -""" % {'action': action, 'params': '\n'.join(params)} +""" % { + 'action': action, + 'params': '\n'.join(params), + } resp = self.requests.post( - self.service_url, - data=data.encode('utf-8'), - headers={'SOAPAction': '"%s"' % action, - 'Content-type': 'text/xml; charset=UTF-8'}) + self.service_url, + data=data.encode('utf-8'), + headers={'SOAPAction': '"%s"' % action, 'Content-type': 'text/xml; charset=UTF-8'}, + ) return ET.ElementTree(ET.fromstring(resp.content)) diff --git a/passerelle/apps/gdc/urls.py b/passerelle/apps/gdc/urls.py index 50e4ce28..a47d6844 100644 --- a/passerelle/apps/gdc/urls.py +++ b/passerelle/apps/gdc/urls.py @@ -20,7 +20,6 @@ from .views import GdcDetailView, VoiesView, PostDemandeView, StatusView urlpatterns = [ url(r'^(?P[\w,-]+)/$', GdcDetailView.as_view(), name='gdc-view'), - url(r'^(?P[\w,-]+)/voies/(?P\d+)$', VoiesView.as_view(), name='gdc-voies'), url(r'^(?P[\w,-]+)/post/demande$', csrf_exempt(PostDemandeView.as_view()), name='gdc-post'), url(r'^(?P[\w,-]+)/status/(?P\d+)', StatusView.as_view(), name='gdc-status'), diff --git a/passerelle/apps/gdc/views.py b/passerelle/apps/gdc/views.py index 61967c48..5422a413 100644 --- a/passerelle/apps/gdc/views.py +++ b/passerelle/apps/gdc/views.py @@ -56,45 +56,45 @@ def get_voies(service, insee): soap_result = phpserialize_loads(resp.findall('.//listeVoie')[0].text) 
result = [] prefix_map = { - 'ALL': 'ALLEE', - 'AUTO': 'AUTOROUTE', - 'AV': 'AVENUE', - 'BASS': 'BASSIN', - 'BD': 'BOULEVARD', - 'CAR': 'CARREFOUR', - 'CHE': 'CHEMIN', - 'COUR': 'COUR', - 'CRS': 'COURS', - 'DESC': 'DESCENTE', - 'DOM': 'DOMAINE', - 'ENCL': 'ENCLOS', - 'ESP': 'ESPLANADE', - 'ESPA': 'ESPACE', - 'GR': '', # "GR GRAND-RUE JEAN MOULIN" - 'IMP': 'IMPASSE', - 'JARD': 'JARDIN', - 'MAIL': '', # "MAIL LE GRAND MAIL" - 'PARC': 'PARC', - 'PARV': '', # "PARV PARVIS DE LA LEGION D HONNEUR" - 'PAS': 'PASSAGE', - 'PL': 'PLACE', - 'PLAN': 'PLAN', - 'PONT': 'PONT', - 'QUA': 'QUAI', - 'R': 'RUE', - 'RAMB': '', # "RAMB RAMBLA DES CALISSONS" - 'RPT': 'ROND-POINT', - 'RTE': 'ROUTE', - 'SQ': 'SQUARE', - 'TSSE': '', # "TSSE TERRASSE DES ALLEES DU BOIS" - 'TUN': 'TUNNEL', - 'VIAD': 'VIADUC', - 'VOI': 'VOIE', + 'ALL': 'ALLEE', + 'AUTO': 'AUTOROUTE', + 'AV': 'AVENUE', + 'BASS': 'BASSIN', + 'BD': 'BOULEVARD', + 'CAR': 'CARREFOUR', + 'CHE': 'CHEMIN', + 'COUR': 'COUR', + 'CRS': 'COURS', + 'DESC': 'DESCENTE', + 'DOM': 'DOMAINE', + 'ENCL': 'ENCLOS', + 'ESP': 'ESPLANADE', + 'ESPA': 'ESPACE', + 'GR': '', # "GR GRAND-RUE JEAN MOULIN" + 'IMP': 'IMPASSE', + 'JARD': 'JARDIN', + 'MAIL': '', # "MAIL LE GRAND MAIL" + 'PARC': 'PARC', + 'PARV': '', # "PARV PARVIS DE LA LEGION D HONNEUR" + 'PAS': 'PASSAGE', + 'PL': 'PLACE', + 'PLAN': 'PLAN', + 'PONT': 'PONT', + 'QUA': 'QUAI', + 'R': 'RUE', + 'RAMB': '', # "RAMB RAMBLA DES CALISSONS" + 'RPT': 'ROND-POINT', + 'RTE': 'ROUTE', + 'SQ': 'SQUARE', + 'TSSE': '', # "TSSE TERRASSE DES ALLEES DU BOIS" + 'TUN': 'TUNNEL', + 'VIAD': 'VIADUC', + 'VOI': 'VOIE', } for k, v in soap_result.items(): for prefix, full in prefix_map.items(): if v.startswith(prefix + ' '): - v = (full + v[len(prefix):]).strip() + v = (full + v[len(prefix) :]).strip() result.append({'id': k, 'text': v}) result.sort(key=lambda x: x['id']) return result @@ -178,10 +178,12 @@ class PostDemandeView(View, SingleObjectMixin): else: code_retour = 
force_text(resp.findall('.//code_retour')[0].text) result = phpserialize_loads(resp.findall('.//listeInfo')[0].text) - result = {'result': code_retour, - 'display_id': result.get('IDENTIFIANT'), - 'id': result.get('IDENTIFIANT'), - 'details': result} + result = { + 'result': code_retour, + 'display_id': result.get('IDENTIFIANT'), + 'id': result.get('IDENTIFIANT'), + 'details': result, + } return utils.response_for_json(request, result) diff --git a/passerelle/apps/gesbac/migrations/0001_initial.py b/passerelle/apps/gesbac/migrations/0001_initial.py index 5117375d..661930b8 100644 --- a/passerelle/apps/gesbac/migrations/0001_initial.py +++ b/passerelle/apps/gesbac/migrations/0001_initial.py @@ -20,11 +20,21 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Form', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('form_id', models.CharField(max_length=64)), ('creation_datetime', models.DateTimeField(auto_now_add=True)), ('filename', models.CharField(max_length=128, null=True)), - ('status', models.CharField(choices=[(b'new', b'New'), (b'sent', b'Sent'), (b'closed', b'Closed')], default=b'new', max_length=8)), + ( + 'status', + models.CharField( + choices=[(b'new', b'New'), (b'sent', b'Sent'), (b'closed', b'Closed')], + default=b'new', + max_length=8, + ), + ), ('demand_data', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), ('card_data', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), ], @@ -35,15 +45,29 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Gesbac', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, 
verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('outcoming_sftp', passerelle.utils.sftp.SFTPField(default=None, verbose_name='Outcoming SFTP')), - ('incoming_sftp', passerelle.utils.sftp.SFTPField(default=None, verbose_name='Incoming SFTP')), + ( + 'outcoming_sftp', + passerelle.utils.sftp.SFTPField(default=None, verbose_name='Outcoming SFTP'), + ), + ( + 'incoming_sftp', + passerelle.utils.sftp.SFTPField(default=None, verbose_name='Incoming SFTP'), + ), ('output_files_prefix', models.CharField(max_length=32, verbose_name='Output files prefix')), ('input_files_prefix', models.CharField(max_length=32, verbose_name='Input files prefix')), - ('users', models.ManyToManyField(blank=True, related_name='_gesbac_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_gesbac_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'Gesbac', diff --git a/passerelle/apps/gesbac/migrations/0003_auto_20200504_1402.py b/passerelle/apps/gesbac/migrations/0003_auto_20200504_1402.py index cdd7660d..3b03f8ff 100644 --- a/passerelle/apps/gesbac/migrations/0003_auto_20200504_1402.py +++ b/passerelle/apps/gesbac/migrations/0003_auto_20200504_1402.py @@ -15,6 +15,8 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='form', name='status', - field=models.CharField(choices=[('new', 'New'), ('sent', 'Sent'), ('closed', 'Closed')], default='new', max_length=8), + field=models.CharField( + choices=[('new', 'New'), ('sent', 'Sent'), ('closed', 'Closed')], default='new', max_length=8 + ), ), ] diff --git a/passerelle/apps/gesbac/models.py b/passerelle/apps/gesbac/models.py index f6cf8869..de2ae14e 100644 --- a/passerelle/apps/gesbac/models.py +++ b/passerelle/apps/gesbac/models.py @@ -34,225 +34,162 @@ from passerelle.utils import SFTPField CSV_DELIMITER = ';' 
FILES_ENCODING = 'latin-1' -APPLICANT_SCHEMA = OrderedDict(( - ("form_id", { - "type": "string", - }), - ("demand_date", { - "type": "string", - "pattern": "^[0-9]{8}$", - }), - ("demand_time", { - "type": "string", - "pattern": "^[0-9]{6}$", - }), - ("producer_code", { - "type": "integer", - }), - ("invariant_number", { - "type": "string", - "maxLength": 10, - "default": "" - }), - ("city_insee_code", { - "type": "string", - }), - ("street_rivoli_code", { - "type": "string", - }), - ("street_name", { - "type": "string", - }), - ("address_complement", { - "type": "string", - "maxLength": 32, - "default": "" - }), - ("street_number", { - "type": "integer", - "default": 0 - }), - ("bis_ter", { - "type": "string", - "maxLength": 3, - "default": "" - }), - ("building", { - "type": "string", - "maxLength": 5, - "default": "" - }), - ("hall", { - "type": "string", - "maxLength": 5, - "default": "" - }), - ("appartment_number", { - "type": "string", - "maxLength": 5, - "default": "" - }), - ("producer_social_reason", { - "type": "string", - "maxLength": 38, - "default": "" - }), - ("producer_title_code", { - "type": "integer", - "default": 0 - }), - ("producer_last_name", { - "type": "string", - "maxLength": 38, - "default": "" - }), - ("producer_first_name", { - "type": "string", - "maxLength": 32, - "default": "" - }), - ("producer_phone", { - "type": "string", - "maxLength": 20, - "default": "" - }), - ("producer_email", { - "type": "string", - "maxLength": 50, - "default": "" - }), - ("owner_last_name", { - "type": "string", - "maxLength": 38, - "default": "" - }), - ("owner_first_name", { - "type": "string", - "maxLength": 32, - "default": "" - }), - ("owner_phone", { - "type": "string", - "maxLength": 20, - "default": "" - }), - ("owner_email", { - "type": "string", - "maxLength": 50, - "default": "" - }), - ("activity_code", { - "type": "integer", - "default": 0 - }), - ("family_members_number", { - "type": "integer", - "default": 0 - }), - ("houses_number", { - 
"type": "integer", - "default": 0 - }), - ("t1_flats_number", { - "type": "integer", - "default": 0 - }), - ("t2_flats_number", { - "type": "integer", - "default": 0 - }), - ("t3_flats_number", { - "type": "integer", - "default": 0 - }), - ("t4_flats_number", { - "type": "integer", - "default": 0 - }), - ("t5_flats_number", { - "type": "integer", - "default": 0 - }), - ("t6_flats_number", { - "type": "integer", - "default": 0 - }), - ("shops_number", { - "type": "integer", - "default": 0 - }), - ("garden_size", { - "type": "integer", - "default": 0 - }), - ("expected_date", { - "type": "string", - "pattern": "^[0-9]{8}$", - "default": "" - }), - ("expected_time", { - "type": "string", - "pattern": "^[0-9]{4}$", - "default": "" - }), - ("modification_code", { - "type": "integer", - "default": 0 - }), - ("demand_reason_label", { - "type": "string", - "default": "" - }), - ("comment", { - "type": "string", - "maxLength": 500, - "default": "" - })) +APPLICANT_SCHEMA = OrderedDict( + ( + ( + "form_id", + { + "type": "string", + }, + ), + ( + "demand_date", + { + "type": "string", + "pattern": "^[0-9]{8}$", + }, + ), + ( + "demand_time", + { + "type": "string", + "pattern": "^[0-9]{6}$", + }, + ), + ( + "producer_code", + { + "type": "integer", + }, + ), + ("invariant_number", {"type": "string", "maxLength": 10, "default": ""}), + ( + "city_insee_code", + { + "type": "string", + }, + ), + ( + "street_rivoli_code", + { + "type": "string", + }, + ), + ( + "street_name", + { + "type": "string", + }, + ), + ("address_complement", {"type": "string", "maxLength": 32, "default": ""}), + ("street_number", {"type": "integer", "default": 0}), + ("bis_ter", {"type": "string", "maxLength": 3, "default": ""}), + ("building", {"type": "string", "maxLength": 5, "default": ""}), + ("hall", {"type": "string", "maxLength": 5, "default": ""}), + ("appartment_number", {"type": "string", "maxLength": 5, "default": ""}), + ("producer_social_reason", {"type": "string", "maxLength": 38, 
"default": ""}), + ("producer_title_code", {"type": "integer", "default": 0}), + ("producer_last_name", {"type": "string", "maxLength": 38, "default": ""}), + ("producer_first_name", {"type": "string", "maxLength": 32, "default": ""}), + ("producer_phone", {"type": "string", "maxLength": 20, "default": ""}), + ("producer_email", {"type": "string", "maxLength": 50, "default": ""}), + ("owner_last_name", {"type": "string", "maxLength": 38, "default": ""}), + ("owner_first_name", {"type": "string", "maxLength": 32, "default": ""}), + ("owner_phone", {"type": "string", "maxLength": 20, "default": ""}), + ("owner_email", {"type": "string", "maxLength": 50, "default": ""}), + ("activity_code", {"type": "integer", "default": 0}), + ("family_members_number", {"type": "integer", "default": 0}), + ("houses_number", {"type": "integer", "default": 0}), + ("t1_flats_number", {"type": "integer", "default": 0}), + ("t2_flats_number", {"type": "integer", "default": 0}), + ("t3_flats_number", {"type": "integer", "default": 0}), + ("t4_flats_number", {"type": "integer", "default": 0}), + ("t5_flats_number", {"type": "integer", "default": 0}), + ("t6_flats_number", {"type": "integer", "default": 0}), + ("shops_number", {"type": "integer", "default": 0}), + ("garden_size", {"type": "integer", "default": 0}), + ("expected_date", {"type": "string", "pattern": "^[0-9]{8}$", "default": ""}), + ("expected_time", {"type": "string", "pattern": "^[0-9]{4}$", "default": ""}), + ("modification_code", {"type": "integer", "default": 0}), + ("demand_reason_label", {"type": "string", "default": ""}), + ("comment", {"type": "string", "maxLength": 500, "default": ""}), + ) ) -CARD_SCHEMA = OrderedDict(( - ("card_subject", { - "type": "integer", - }), - ("card_type", { - "type": "integer", - }), - ("card_demand_reason", { - "type": "integer", - }), - ("cards_quantity", { - "type": "integer", - - }), - ("card_number", { - "type": "string", - "maxLength": 20, - }), - ("card_bar_code", { - "type": 
"string", - "maxLength": 20, - "default": "", - }), - ("card_code", { - "type": "string", - "maxLength": 20, - "default": "", - }), - ("card_validity_start_date", { - "type": "string", - "pattern": "^[0-9]{8}$", - "default": "", - }), - ("card_validity_end_date", { - "type": "string", - "pattern": "^[0-9]{8}$", - "default": "", - }), - ("card_comment", { - "type": "string", - "maxLength": 100, - "default": "", - })) +CARD_SCHEMA = OrderedDict( + ( + ( + "card_subject", + { + "type": "integer", + }, + ), + ( + "card_type", + { + "type": "integer", + }, + ), + ( + "card_demand_reason", + { + "type": "integer", + }, + ), + ( + "cards_quantity", + { + "type": "integer", + }, + ), + ( + "card_number", + { + "type": "string", + "maxLength": 20, + }, + ), + ( + "card_bar_code", + { + "type": "string", + "maxLength": 20, + "default": "", + }, + ), + ( + "card_code", + { + "type": "string", + "maxLength": 20, + "default": "", + }, + ), + ( + "card_validity_start_date", + { + "type": "string", + "pattern": "^[0-9]{8}$", + "default": "", + }, + ), + ( + "card_validity_end_date", + { + "type": "string", + "pattern": "^[0-9]{8}$", + "default": "", + }, + ), + ( + "card_comment", + { + "type": "string", + "maxLength": 100, + "default": "", + }, + ), + ) ) DEMAND_SCHEMA = APPLICANT_SCHEMA.copy() @@ -264,9 +201,17 @@ SCHEMA = { "description": "", "type": "object", "required": [ - "form_id", "demand_date", "demand_time", "producer_code", "city_insee_code", - "street_rivoli_code", "street_name", "card_subject", "card_type", "card_demand_reason", - "cards_quantity" + "form_id", + "demand_date", + "demand_time", + "producer_code", + "city_insee_code", + "street_rivoli_code", + "street_name", + "card_subject", + "card_type", + "card_demand_reason", + "cards_quantity", ], "properties": DEMAND_SCHEMA, } @@ -275,10 +220,8 @@ SCHEMA = { class Gesbac(BaseResource): outcoming_sftp = SFTPField(verbose_name=_('Outcoming SFTP')) incoming_sftp = SFTPField(verbose_name=_('Incoming SFTP')) - 
output_files_prefix = models.CharField(_('Output files prefix'), - blank=False, max_length=32) - input_files_prefix = models.CharField(_('Input files prefix'), - blank=False, max_length=32) + output_files_prefix = models.CharField(_('Output files prefix'), blank=False, max_length=32) + input_files_prefix = models.CharField(_('Input files prefix'), blank=False, max_length=32) category = _('Business Process Connectors') @@ -319,17 +262,14 @@ class Gesbac(BaseResource): form = Form.objects.get(id=form_id) form.send() - @endpoint(name='create-demand', - perm='can_access', - description=_('Create demand'), - post={ - 'description': _('Creates a demand file'), - 'request_body': { - 'schema': { - 'application/json': SCHEMA - } - } - } + @endpoint( + name='create-demand', + perm='can_access', + description=_('Create demand'), + post={ + 'description': _('Creates a demand file'), + 'request_body': {'schema': {'application/json': SCHEMA}}, + }, ) def create_demand(self, request, post_data): form_id = post_data['form_id'] @@ -365,32 +305,23 @@ class Gesbac(BaseResource): form.demand_data = data form.save() self.add_job('send_demand', form_id=form.id) - return {'data': {'filename': form.get_filename(), - 'gesbac_id': form.get_gesbac_id()}} + return {'data': {'filename': form.get_filename(), 'gesbac_id': form.get_gesbac_id()}} - @endpoint(name='get-response', perm='can_access', - description=_('Get response'), - parameters={ - 'gesbac_id': { - 'description': _('Gesbac demand identifier'), - 'example_value': '420001' - } - } + @endpoint( + name='get-response', + perm='can_access', + description=_('Get response'), + parameters={'gesbac_id': {'description': _('Gesbac demand identifier'), 'example_value': '420001'}}, ) def get_response(self, request, gesbac_id): try: - response = self.form_set.filter(status='closed', - gesbac_id=gesbac_id).latest() + response = self.form_set.filter(status='closed', gesbac_id=gesbac_id).latest() return {'data': response.card_data} except 
Form.DoesNotExist: raise Http404('No response found') -FORM_STATUSES = ( - ('new', 'New'), - ('sent', 'Sent'), - ('closed', 'Closed') -) +FORM_STATUSES = (('new', 'New'), ('sent', 'Sent'), ('closed', 'Closed')) class Form(models.Model): @@ -400,9 +331,7 @@ class Form(models.Model): counter = models.IntegerField(default=0) creation_datetime = models.DateTimeField(auto_now_add=True) filename = models.CharField(max_length=128, null=True) - status = models.CharField(max_length=8, default='new', - choices=FORM_STATUSES - ) + status = models.CharField(max_length=8, default='new', choices=FORM_STATUSES) demand_data = JSONField(default=dict) card_data = JSONField(default=dict) @@ -419,8 +348,11 @@ class Form(models.Model): def get_filename(self): if not self.filename: timestamp = self.creation_datetime.strftime('%y%m%d-%H%M%S') - self.filename = '%s%s-%s.csv' % (self.resource.output_files_prefix, - timestamp, self.get_gesbac_id()) + self.filename = '%s%s-%s.csv' % ( + self.resource.output_files_prefix, + timestamp, + self.get_gesbac_id(), + ) self.save() return self.filename @@ -434,8 +366,8 @@ class Form(models.Model): # encode strings to ASCII if six.PY2: row = [ - item.encode(FILES_ENCODING) if isinstance(item, six.string_types) - else item for item in row + item.encode(FILES_ENCODING) if isinstance(item, six.string_types) else item + for item in row ] writer.writerow(row) self.status = 'sent' diff --git a/passerelle/apps/jsondatastore/migrations/0001_initial.py b/passerelle/apps/jsondatastore/migrations/0001_initial.py index 93d8596e..a73cbed8 100644 --- a/passerelle/apps/jsondatastore/migrations/0001_initial.py +++ b/passerelle/apps/jsondatastore/migrations/0001_initial.py @@ -16,10 +16,25 @@ class Migration(migrations.Migration): migrations.CreateModel( name='JsonData', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('uuid', models.CharField(default=passerelle.apps.jsondatastore.models.get_hex_uuid, 
verbose_name='uuid', unique=True, max_length=32, editable=False)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), + ( + 'uuid', + models.CharField( + default=passerelle.apps.jsondatastore.models.get_hex_uuid, + verbose_name='uuid', + unique=True, + max_length=32, + editable=False, + ), + ), ('name_id', models.CharField(max_length=256, blank=True)), - ('content', django.contrib.postgres.fields.jsonb.JSONField(default=dict, verbose_name='Content')), + ( + 'content', + django.contrib.postgres.fields.jsonb.JSONField(default=dict, verbose_name='Content'), + ), ('creation_datetime', models.DateTimeField(auto_now_add=True)), ('last_update_datetime', models.DateTimeField(auto_now=True)), ], @@ -27,12 +42,38 @@ class Migration(migrations.Migration): migrations.CreateModel( name='JsonDataStore', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_jsondata_users_+', related_query_name='+', blank=True)), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + 
related_name='_jsondata_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'JSON Data Store', diff --git a/passerelle/apps/jsondatastore/models.py b/passerelle/apps/jsondatastore/models.py index ecd69fa9..4589ebc6 100644 --- a/passerelle/apps/jsondatastore/models.py +++ b/passerelle/apps/jsondatastore/models.py @@ -44,8 +44,7 @@ def clean_json_data(data): class JsonData(models.Model): datastore = models.ForeignKey('JsonDataStore', null=True, on_delete=models.CASCADE) - uuid = models.CharField(_('uuid'), max_length=32, - default=get_hex_uuid, editable=False, unique=True) + uuid = models.CharField(_('uuid'), max_length=32, default=get_hex_uuid, editable=False, unique=True) name_id = models.CharField(max_length=256, blank=True) content = JSONField(_('Content')) text = models.CharField(max_length=256, blank=True) @@ -76,29 +75,27 @@ class JsonData(models.Model): class JsonDataStore(BaseResource): category = _('Data Sources') - documentation_url = 'https://doc-publik.entrouvert.com/admin-fonctionnel/parametrage-avance/json-data-store/' - text_value_template = models.CharField(_('Template for "text" key value'), - max_length=256, blank=True) + documentation_url = ( + 'https://doc-publik.entrouvert.com/admin-fonctionnel/parametrage-avance/json-data-store/' + ) + text_value_template = models.CharField(_('Template for "text" key value'), max_length=256, blank=True) class Meta: verbose_name = _('JSON Data Store') - @endpoint(perm='can_access', name='data', pattern=r'$', - description=_('Listing'), - long_description=_( - 'More filtering on attributes is possible ' - 'using "key=val" additionals parameters'), - parameters={ - 'name_id': { - 'description': _('Object identifier'), - 'example_value': '12345' - }, - 'q': { - 'description': _('Filter on "text" key value'), - 'example_value': 'rue du chateau' - }, - } - ) + @endpoint( + perm='can_access', + name='data', + pattern=r'$', + description=_('Listing'), + long_description=_( + 'More 
filtering on attributes is possible ' 'using "key=val" additionals parameters' + ), + parameters={ + 'name_id': {'description': _('Object identifier'), 'example_value': '12345'}, + 'q': {'description': _('Filter on "text" key value'), 'example_value': 'rue du chateau'}, + }, + ) def list(self, request, name_id=None, q=None, **kwargs): objects = JsonData.objects.filter(datastore=self) if name_id is not None: @@ -110,10 +107,14 @@ class JsonDataStore(BaseResource): objects = [o for o in objects if o.content.get(key) == value] return {'data': [x.to_json() for x in objects]} - @endpoint(perm='can_access', methods=['post'], name='data', - pattern=r'create$', - example_pattern='create', - description=_('Create')) + @endpoint( + perm='can_access', + methods=['post'], + name='data', + pattern=r'create$', + example_pattern='create', + description=_('Create'), + ) def create(self, request, name_id=None, **kwargs): content = clean_json_data(request.body) attrs = { @@ -135,14 +136,17 @@ class JsonDataStore(BaseResource): attrs['name_id'] = name_id return JsonData.objects.get(**attrs) - @endpoint(perm='can_access', methods=['get', 'post', 'patch'], name='data', - pattern=r'(?P\w+)/$', - example_pattern='{uuid}/', - description_get=_('Get'), - description_post=_('Replace'), - description_patch=_('Update'), - parameters={'uuid': {'description': _('Object identifier'), 'example_value': '12345'}}, - ) + @endpoint( + perm='can_access', + methods=['get', 'post', 'patch'], + name='data', + pattern=r'(?P\w+)/$', + example_pattern='{uuid}/', + description_get=_('Get'), + description_post=_('Replace'), + description_patch=_('Update'), + parameters={'uuid': {'description': _('Object identifier'), 'example_value': '12345'}}, + ) def get_or_replace(self, request, uuid, name_id=None): data = self.get_data_object(uuid, name_id) if request.method == 'POST': @@ -155,25 +159,31 @@ class JsonDataStore(BaseResource): data.save() return data.to_json() - @endpoint(perm='can_access', 
methods=['post'], name='data', - description=_('Delete'), - pattern=r'(?P\w+)/delete$', - example_pattern='{uuid}/delete', - parameters={'uuid': {'description': _('Object identifier'), 'example_value': '12345'}}, - ) + @endpoint( + perm='can_access', + methods=['post'], + name='data', + description=_('Delete'), + pattern=r'(?P\w+)/delete$', + example_pattern='{uuid}/delete', + parameters={'uuid': {'description': _('Object identifier'), 'example_value': '12345'}}, + ) def delete_(self, request, uuid, name_id=None): # delete() would collide with Model.delete() self.get_data_object(uuid, name_id).delete() return {} - @endpoint(perm='can_access', name='data', - pattern=r'by/(?P[\w-]+)/$', - example_pattern='by/{attribute}/', - description=_('Get a single object by attribute'), - parameters={'attribute': {'description': _('Attribute name'), 'example_value': 'code'}, - 'value': {'description': _('Attribute value'), 'example_value': '12345'}, - }, - ) + @endpoint( + perm='can_access', + name='data', + pattern=r'by/(?P[\w-]+)/$', + example_pattern='by/{attribute}/', + description=_('Get a single object by attribute'), + parameters={ + 'attribute': {'description': _('Attribute name'), 'example_value': 'code'}, + 'value': {'description': _('Attribute value'), 'example_value': '12345'}, + }, + ) def get_by_attribute(self, request, attribute, value, name_id=None): objects = JsonData.objects.filter(datastore=self) if name_id is not None: diff --git a/passerelle/apps/maelis/migrations/0001_initial.py b/passerelle/apps/maelis/migrations/0001_initial.py index 66067d7d..37a9a0ae 100644 --- a/passerelle/apps/maelis/migrations/0001_initial.py +++ b/passerelle/apps/maelis/migrations/0001_initial.py @@ -18,7 +18,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Link', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID'), + ), ('name_id', models.CharField(max_length=256)), ('family_id', models.CharField(max_length=128)), ('created', models.DateTimeField(auto_now_add=True)), @@ -28,14 +31,27 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Maelis', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('base_url', models.URLField(default='http://www3.sigec.fr/entrouvertws/services/', verbose_name='Base API URL')), + ( + 'base_url', + models.URLField( + default='http://www3.sigec.fr/entrouvertws/services/', verbose_name='Base API URL' + ), + ), ('login', models.CharField(max_length=256, verbose_name='API Login')), ('password', models.CharField(max_length=256, verbose_name='API Password')), - ('users', models.ManyToManyField(blank=True, related_name='_maelis_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_maelis_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'Maelis', diff --git a/passerelle/apps/maelis/models.py b/passerelle/apps/maelis/models.py index d52f7e3f..140b49f0 100644 --- a/passerelle/apps/maelis/models.py +++ b/passerelle/apps/maelis/models.py @@ -53,8 +53,8 @@ LINK_SCHEMA = { "school_year": { "description": "school year", "type": "string", - } - } + }, + }, } COORDINATES_SCHEMA = { @@ -63,11 +63,7 @@ COORDINATES_SCHEMA = { "description": "Person Coordinates", "type": "object", "properties": { - "num": { - "description": "number", - "type": "string", - "pattern": "^[0-9]*$" - }, + "num": {"description": "number", "type": "string", "pattern": "^[0-9]*$"}, 
"street": { "description": "street", "type": "string", @@ -91,14 +87,13 @@ COORDINATES_SCHEMA = { "mail": { "description": "mail", "type": "string", - } - } + }, + }, } class Maelis(BaseResource): - base_url = models.URLField(_('Base API URL'), - default='http://www3.sigec.fr/entrouvertws/services/') + base_url = models.URLField(_('Base API URL'), default='http://www3.sigec.fr/entrouvertws/services/') login = models.CharField(_('API Login'), max_length=256) password = models.CharField(_('API Password'), max_length=256) @@ -138,10 +133,9 @@ class Maelis(BaseResource): if not school_year: # fallback to current year if not provided school_year = utils.get_school_year() - family_data = serialize_object(self.call('FamilyService?wsdl', - 'readFamily', - dossierNumber=family_id, - schoolYear=school_year)) + family_data = serialize_object( + self.call('FamilyService?wsdl', 'readFamily', dossierNumber=family_id, schoolYear=school_year) + ) for child in family_data['childInfoList']: utils.normalize_person(child) return family_data @@ -158,9 +152,12 @@ class Maelis(BaseResource): def get_invoices(self, regie_id, name_id): family_id = self.get_link(name_id).family_id - return [utils.normalize_invoice(i) for i in self.call( - 'InvoiceService?wsdl', 'readInvoices', - numDossier=family_id, codeRegie=regie_id)] + return [ + utils.normalize_invoice(i) + for i in self.call( + 'InvoiceService?wsdl', 'readInvoices', numDossier=family_id, codeRegie=regie_id + ) + ] @endpoint( display_category=_('Family'), @@ -170,26 +167,22 @@ class Maelis(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, }, - post={ - 'request_body': { - 'schema': { - 'application/json': LINK_SCHEMA - } - } - }) + post={'request_body': {'schema': {'application/json': LINK_SCHEMA}}}, + ) def link(self, request, NameID, post_data): if 'school_year' not in post_data: # fallback to default year if not provided post_data['school_year'] = utils.get_school_year() - r = self.call('FamilyService?wsdl', 
'readFamilyByPassword', - dossierNumber=post_data['family_id'], - password=post_data['password'], - schoolYear=post_data['school_year'] - ) + r = self.call( + 'FamilyService?wsdl', + 'readFamilyByPassword', + dossierNumber=post_data['family_id'], + password=post_data['password'], + schoolYear=post_data['school_year'], + ) if not r.number: raise APIError('Family not found', err_code='not-found') - Link.objects.update_or_create(resource=self, name_id=NameID, - defaults={'family_id': r.number}) + Link.objects.update_or_create(resource=self, name_id=NameID, defaults={'family_id': r.number}) return {'data': serialize_object(r)} @endpoint( @@ -200,7 +193,8 @@ class Maelis(BaseResource): perm='can_access', parameters={ 'NameID': {'description': _('Publik ID')}, - }) + }, + ) def unlink(self, request, NameID): link = self.get_link(NameID) link_id = link.pk @@ -215,7 +209,8 @@ class Maelis(BaseResource): perm='can_access', parameters={ 'NameID': {'description': _('Publik ID')}, - }) + }, + ) def family_info(self, request, NameID): link = self.get_link(NameID) family_data = self.get_family_data(link.family_id) @@ -229,7 +224,8 @@ class Maelis(BaseResource): name='children-info', parameters={ 'NameID': {'description': _('Publik ID')}, - }) + }, + ) def children_info(self, request, NameID): link = self.get_link(NameID) family_data = self.get_family_data(link.family_id) @@ -243,7 +239,8 @@ class Maelis(BaseResource): name='adults-info', parameters={ 'NameID': {'description': _('Publik ID')}, - }) + }, + ) def adults_info(self, request, NameID): link = self.get_link(NameID) family_data = self.get_family_data(link.family_id) @@ -263,7 +260,8 @@ class Maelis(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'childID': {'description': _('Child ID')}, - }) + }, + ) def child_info(self, request, NameID, childID): return {'data': self.get_child_info(NameID, childID)} @@ -277,13 +275,8 @@ class Maelis(BaseResource): 'NameID': {'description': _('Publik ID')}, 
'personID': {'description': _('Person ID')}, }, - post={ - 'request_body': { - 'schema': { - 'application/json': COORDINATES_SCHEMA - } - } - }) + post={'request_body': {'schema': {'application/json': COORDINATES_SCHEMA}}}, + ) def update_coordinates(self, request, NameID, personID, post_data): link = self.get_link(NameID) params = defaultdict(dict) @@ -297,10 +290,9 @@ class Maelis(BaseResource): if contact_param in post_data: params['contact'][contact_param] = post_data[contact_param] - r = self.call('FamilyService?wsdl', 'updateCoordinate', - numDossier=link.family_id, - numPerson=personID, - **params) + r = self.call( + 'FamilyService?wsdl', 'updateCoordinate', numDossier=link.family_id, numPerson=personID, **params + ) return serialize_object(r) @endpoint( @@ -313,11 +305,11 @@ class Maelis(BaseResource): description=_("Get invoices to pay"), parameters={ 'NameID': {'description': _('Publik ID')}, - 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-42'} - }) + 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-42'}, + }, + ) def invoices(self, request, regie_id, NameID): - invoices = [i for i in self.get_invoices( - regie_id=regie_id, name_id=NameID) if not i['paid']] + invoices = [i for i in self.get_invoices(regie_id=regie_id, name_id=NameID) if not i['paid']] return {'data': invoices} @endpoint( @@ -330,11 +322,11 @@ class Maelis(BaseResource): description=_("Get invoices already paid"), parameters={ 'NameID': {'description': _('Publik ID')}, - 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-42'} - }) + 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-42'}, + }, + ) def invoices_history(self, request, regie_id, NameID): - invoices = [i for i in self.get_invoices( - regie_id=regie_id, name_id=NameID) if i['paid']] + invoices = [i for i in self.get_invoices(regie_id=regie_id, name_id=NameID) if i['paid']] return {'data': invoices} @endpoint( @@ -348,8 +340,9 @@ class 
Maelis(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'regie_id': {'description': _('Regie identifier'), 'example_value': '1'}, - 'invoice_id': {'description': _('Invoice identifier'), 'example_value': '42-42'} - }) + 'invoice_id': {'description': _('Invoice identifier'), 'example_value': '42-42'}, + }, + ) def invoice(self, request, regie_id, invoice_id, NameID): for invoice in self.get_invoices(regie_id=regie_id, name_id=NameID): if invoice['id'] == invoice_id: @@ -366,8 +359,9 @@ class Maelis(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'regie_id': {'description': _('Regie identifier'), 'example_value': '1'}, - 'invoice_id': {'description': _('Invoice identifier'), 'example_value': '42-42'} - }) + 'invoice_id': {'description': _('Invoice identifier'), 'example_value': '42-42'}, + }, + ) def invoice_pdf(self, request, regie_id, invoice_id, **kwargs): # TODO to implement pass @@ -380,9 +374,11 @@ class Maelis(BaseResource): 'NameID': {'description': _('Publik ID')}, 'personID': {'description': _('Person ID')}, 'school_year': {'description': _('School year')}, - }) - def activity_list(self, request, NameID, personID, school_year=None, start_datetime=None, - end_datetime=None): + }, + ) + def activity_list( + self, request, NameID, personID, school_year=None, start_datetime=None, end_datetime=None + ): link = self.get_link(NameID) family_data = self.get_family_data(link.family_id) if personID not in [c['id'] for c in family_data['childInfoList']]: @@ -393,10 +389,14 @@ class Maelis(BaseResource): start_datetime = timezone.now() if not end_datetime: end_datetime = start_datetime + timezone.timedelta(days=62) - r = self.call('ActivityService?wsdl', 'readActivityList', - schoolyear=school_year, numPerson=personID, - dateStartCalend=start_datetime, - dateEndCalend=end_datetime) + r = self.call( + 'ActivityService?wsdl', + 'readActivityList', + schoolyear=school_year, + numPerson=personID, + 
dateStartCalend=start_datetime, + dateEndCalend=end_datetime, + ) activities = serialize_object(r) return {'data': [utils.normalize_activity(a) for a in activities]} @@ -419,11 +419,14 @@ class Maelis(BaseResource): return school_year, start_date, end_date def get_child_activities(self, childID, school_year, start_date, end_date): - r = self.call('ActivityService?wsdl', 'readActivityList', - schoolyear=school_year, - numPerson=childID, - dateStartCalend=start_date, - dateEndCalend=end_date) + r = self.call( + 'ActivityService?wsdl', + 'readActivityList', + schoolyear=school_year, + numPerson=childID, + dateStartCalend=start_date, + dateEndCalend=end_date, + ) return serialize_object(r) @endpoint( @@ -435,14 +438,14 @@ class Maelis(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'childID': {'description': _('Child ID')}, - 'subscribePublication': { - 'description': _('string including E, N or L (default to "E")')}, - 'subscribingStatus': { - 'description': _('subscribed, not-subscribed or None')}, - 'queryDate': {'description': _('Optional querying date (YYYY-MM-DD)')} - }) - def child_activities(self, request, NameID, childID, - subscribePublication='E', subscribingStatus=None, queryDate=None): + 'subscribePublication': {'description': _('string including E, N or L (default to "E")')}, + 'subscribingStatus': {'description': _('subscribed, not-subscribed or None')}, + 'queryDate': {'description': _('Optional querying date (YYYY-MM-DD)')}, + }, + ) + def child_activities( + self, request, NameID, childID, subscribePublication='E', subscribingStatus=None, queryDate=None + ): if subscribingStatus and subscribingStatus not in ('subscribed', 'not-subscribed'): raise APIError('wrong value for subscribingStatus: %s' % subscribingStatus) school_year, start_date, end_date = self.get_activities_dates(queryDate) @@ -451,7 +454,8 @@ class Maelis(BaseResource): flatted_activities = utils.flatten_activities(activities, start_date, end_date) 
utils.mark_subscribed_flatted_activities(flatted_activities, child_info) data = utils.flatted_activities_as_list( - flatted_activities, subscribePublication, subscribingStatus, start_date) + flatted_activities, subscribePublication, subscribingStatus, start_date + ) return {'data': data} @endpoint( @@ -466,9 +470,9 @@ class Maelis(BaseResource): 'activityID': {'description': _('Activity ID')}, 'unitID': {'description': _('Unit ID')}, 'queryDate': {'description': _('Optional querying date (YYYY-MM-DD)')}, - 'direction': { - 'description': _('aller, retour or None')}, - }) + 'direction': {'description': _('aller, retour or None')}, + }, + ) def bus_lines(self, request, NameID, childID, activityID, unitID, queryDate=None, direction=None): if direction and direction.lower() not in ('aller', 'retour'): raise APIError('wrong value for direction: %s' % direction) @@ -492,16 +496,18 @@ class Maelis(BaseResource): unit_weekly_planning += unit_calendar_letter else: unit_weekly_planning += '1' - bus_lines.append({ - 'id': bus_unit_info['unit_id'], - 'text': bus_unit_info['unit_text'], - 'unit_id': bus_unit_info['unit_id'], - 'activity_id': bus_activity_id, - 'unit_calendar_letter': unit_calendar_letter, - 'unit_weekly_planning': unit_weekly_planning, - 'subscribe_start_date': legacy_unit_info['unit_start_date'], - 'subscribe_end_date': legacy_unit_info['unit_end_date'], - }) + bus_lines.append( + { + 'id': bus_unit_info['unit_id'], + 'text': bus_unit_info['unit_text'], + 'unit_id': bus_unit_info['unit_id'], + 'activity_id': bus_activity_id, + 'unit_calendar_letter': unit_calendar_letter, + 'unit_weekly_planning': unit_weekly_planning, + 'subscribe_start_date': legacy_unit_info['unit_start_date'], + 'subscribe_end_date': legacy_unit_info['unit_end_date'], + } + ) return {'data': bus_lines} @endpoint( @@ -515,8 +521,9 @@ class Maelis(BaseResource): 'childID': {'description': _('Child ID')}, 'busActivityID': {'description': _('Activity ID')}, 'busUnitID': {'description': _('Bus 
Unit ID')}, - 'queryDate': {'description': _('Optional querying date (YYYY-MM-DD)')} - }) + 'queryDate': {'description': _('Optional querying date (YYYY-MM-DD)')}, + }, + ) def bus_stops(self, request, NameID, childID, busActivityID, busUnitID, queryDate=None): school_year, start_date, end_date = self.get_activities_dates(queryDate) self.get_child_info(NameID, childID) @@ -538,10 +545,12 @@ class Maelis(BaseResource): bus_stops = [] for place in unit['placeList']: - bus_stops.append({ - 'id': place['id'], - 'text': ' '.join([w.capitalize() for w in place['lib'].split(' ')]), - }) + bus_stops.append( + { + 'id': place['id'], + 'text': ' '.join([w.capitalize() for w in place['lib'].split(' ')]), + } + ) if bus_stops: bus_stops[0]['disabled'] = True # hide terminus return {'data': bus_stops} @@ -558,7 +567,8 @@ class Maelis(BaseResource): 'start_date': {'description': _('Start date (YYYY-MM-DD format)')}, 'end_date': {'description': _('End date (YYYY-MM-DD format)')}, 'legacy': {'description': _('Decompose events related to parts of the day if set')}, - }) + }, + ) def child_planning(self, request, NameID, childID, start_date=None, end_date=None, legacy=None): """ Return an events list sorted by id """ link = self.get_link(NameID) @@ -572,28 +582,35 @@ class Maelis(BaseResource): start, end = utils.week_boundaries_datetimes(start_date) school_year = utils.get_school_year(start.date()) - r = self.call('ActivityService?wsdl', 'readActivityList', - schoolyear=school_year, numPerson=childID, - dateStartCalend=start, - dateEndCalend=end) + r = self.call( + 'ActivityService?wsdl', + 'readActivityList', + schoolyear=school_year, + numPerson=childID, + dateStartCalend=start, + dateEndCalend=end, + ) activities = serialize_object(r) - events = {key: value - for a in activities - for (key, value) in utils.get_events(a, start, end)} + events = {key: value for a in activities for (key, value) in utils.get_events(a, start, end)} for date in utils.month_range(start, end): - r = 
self.call('ActivityService?wsdl', 'readChildMonthPlanning', - year=date.year, - numMonth=date.month, - numPerson=childID) + r = self.call( + 'ActivityService?wsdl', + 'readChildMonthPlanning', + year=date.year, + numMonth=date.month, + numPerson=childID, + ) planning = serialize_object(r['calendList']) for schedule in planning: utils.book_event(events, schedule, start, end) if not legacy: - events = {x['id']: x # dictionary is used de remove dupplicated events - for e in events.values() - for x in utils.decompose_event(e)} + events = { + x['id']: x # dictionary is used de remove dupplicated events + for e in events.values() + for x in utils.decompose_event(e) + } return {'data': [s[1] for s in sorted(events.items())]} @endpoint( @@ -610,28 +627,26 @@ class Maelis(BaseResource): 'weeklyPlanning': {'description': _('Week planning (7 chars)')}, 'start_date': {'description': _('Start date of the unit (YYYY-MM-DD)')}, 'end_date': {'description': _('End date of the unit (YYYY-MM-DD)')}, - }) - def subscribe(self, request, NameID, childID, activityID, unitID, placeID, - weeklyPlanning, start_date, end_date): + }, + ) + def subscribe( + self, request, NameID, childID, activityID, unitID, placeID, weeklyPlanning, start_date, end_date + ): self.get_child_info(NameID, childID) client = self.get_client('FamilyService?wsdl') trigram_type = client.get_type('ns1:activityUnitPlaceBean') - trigram = trigram_type( - idActivity=activityID, - idUnit=unitID, - idPlace=placeID) + trigram = trigram_type(idActivity=activityID, idUnit=unitID, idPlace=placeID) subscription_type = client.get_type('ns1:subscribeActivityRequestBean') subpscription = subscription_type( personNumber=childID, activityUnitPlace=trigram, weeklyPlanning=weeklyPlanning, dateStart=start_date, - dateEnd=end_date) - r = self.call('FamilyService?wsdl', 'subscribeActivity', - subscribeActivityRequestBean=subpscription) + dateEnd=end_date, + ) + r = self.call('FamilyService?wsdl', 'subscribeActivity', 
subscribeActivityRequestBean=subpscription) return {'data': serialize_object(r)} - @endpoint( display_category=_('Family'), perm='can_access', @@ -642,13 +657,17 @@ class Maelis(BaseResource): 'childID': {'description': _('Child ID')}, 'activityID': {'description': _('Activity ID')}, 'start_date': {'description': _('Start date of the unit (YYYY-MM-DD)')}, - }) + }, + ) def unsubscribe(self, request, NameID, childID, activityID, start_date): self.get_child_info(NameID, childID) - r = self.call('FamilyService?wsdl', 'deletesubscribe', - numPerson=childID, - idActivite=activityID, - dateRefDelete=start_date) + r = self.call( + 'FamilyService?wsdl', + 'deletesubscribe', + numPerson=childID, + idActivite=activityID, + dateRefDelete=start_date, + ) return {'data': serialize_object(r)} diff --git a/passerelle/apps/maelis/utils.py b/passerelle/apps/maelis/utils.py index ab214316..b6e16078 100644 --- a/passerelle/apps/maelis/utils.py +++ b/passerelle/apps/maelis/utils.py @@ -43,7 +43,7 @@ COMPONENTS = { 'PART03': { 'text': 'Après-midi', 'time': '14:00:00', - } + }, } COMPOSED_UNITS = { @@ -77,14 +77,10 @@ def normalize_invoice(invoice): 'total_amount': invoice.amountInvoice, 'pay_limit_date': invoice.dateDeadline.strftime(DATETIME_FORMAT), 'has_pdf': bool(invoice.pdfName), - 'amount_paid': invoice.amountPaid + 'amount_paid': invoice.amountPaid, } if invoice.amountInvoice == invoice.amountPaid: - data.update({ - 'amount': 0, - 'pay_limit_date': '', - 'online_payment': False - }) + data.update({'amount': 0, 'pay_limit_date': '', 'online_payment': False}) return data @@ -96,8 +92,7 @@ def normalize_activity(activity): def normalize_person(person): person['id'] = person['num'] - person['text'] = '{} {}'.format( - person['firstname'], person['lastname']).strip() + person['text'] = '{} {}'.format(person['firstname'], person['lastname']).strip() return person @@ -111,8 +106,8 @@ def get_school_year(date=None): def week_boundaries_datetimes(date_string=None): - """ Return start and 
end of the week including the provided date, - or the current week if no date is provided. """ + """Return start and end of the week including the provided date, + or the current week if no date is provided.""" if date_string: date = parse_date(date_string) else: @@ -138,9 +133,9 @@ def month_range(start_datetime, end_datetime): def get_events(activity, start_datetime, end_datetime): - """ Generate events from activity's open days + """Generate events from activity's open days the events looks like the chrono ones : /api/agenda/agenda-evenement/datetimes/ - (https://doc-publik.entrouvert.com/dev/api-chrono/#exemple) """ + (https://doc-publik.entrouvert.com/dev/api-chrono/#exemple)""" activity_id = activity['activityPortail']['idAct'] for unit in activity['unitPortailList']: unit_id = unit['idUnit'] @@ -179,15 +174,14 @@ def book_event(events, schedule, start_date, end_date): try: event = events[event_id] except KeyError: - raise APIError('The planning returns an unknow day on activities: %s' - % day['datePlanning']) + raise APIError('The planning returns an unknow day on activities: %s' % day['datePlanning']) event['user_booking_status'] = 'booked' def decompose_event(event): - """ Break down 'JOURNEE', 'MATIN', 'MATIN ET REPAS' and APRES MIDI' units - into 'Matin', 'Repas' and 'Après-midi' virtual units. 
""" + """Break down 'JOURNEE', 'MATIN', 'MATIN ET REPAS' and APRES MIDI' units + into 'Matin', 'Repas' and 'Après-midi' virtual units.""" if event['slot_id'] not in COMPOSED_UNITS.keys(): yield event return @@ -201,8 +195,7 @@ def decompose_event(event): new_event = copy(event) new_event['datetime'] = '%s %s' % (date_string, component['time']) new_event['slot_id'] = "%s%s" % (composition['virtual_unit'], component_id) - new_event['id'] = '%s-%s-%s' % ( - date_string, event['category_id'], new_event['slot_id']) + new_event['id'] = '%s-%s-%s' % (date_string, event['category_id'], new_event['slot_id']) new_event['text'] = component['text'] yield new_event @@ -217,23 +210,20 @@ def flatten_activities(activities, start_date, end_date): activity_text = activity_text_legacy = activity['activityPortail']['label'] match = regex.match(activity_text) if match: - activity_text = activity_text[match.end():].strip() + activity_text = activity_text[match.end() :].strip() activity_text = activity_text.capitalize() activity_obj = deepcopy(activity) del activity_obj['unitPortailList'] if not activity_obj['activityPortail']['activityType']: activity_obj['activityPortail']['activityType'] = { - "code" : "?", - "libelle" : "Inconnu", - "natureSpec" : { - "code" : "?", - "libelle" : "Inconnu" - } + "code": "?", + "libelle": "Inconnu", + "natureSpec": {"code": "?", "libelle": "Inconnu"}, } # compute weekly planning mask parameter to use for subscribing planning_masks = [] - for year in range(start_date.year, end_date.year+1): + for year in range(start_date.year, end_date.year + 1): for item in activity['activityPortail']['weeklyCalendarActivityList']: if item['yearCalendar'] == year: planning_masks.append(item['weeklyCalendarStr']) @@ -268,7 +258,7 @@ def flatten_activities(activities, start_date, end_date): unit_text = unit_text_legacy = unit['label'] match = regex.match(unit_text) if match: - unit_text = unit_text[match.end():].strip() + unit_text = unit_text[match.end() :].strip() 
unit_text = unit_text.capitalize() text_first_part = activity_text if activity_text != unit_text: @@ -288,7 +278,7 @@ def flatten_activities(activities, start_date, end_date): unit_weekly_planning += '1' unit_info = { 'unit_id': unit_id, - 'unit_text' : unit_text, + 'unit_text': unit_text, 'text_first_part': text_first_part, 'unit_object': unit_obj, 'unit_start_date': unit_start_date, @@ -305,8 +295,8 @@ def flatten_activities(activities, start_date, end_date): places[place_text_legacy] = { 'id': '%s-%s-%s' % (activity_id, unit_id, place_id), - 'text_legacy': '%s / %s / %s' % ( - activity_text_legacy, unit_text_legacy, place_text_legacy), + 'text_legacy': '%s / %s / %s' + % (activity_text_legacy, unit_text_legacy, place_text_legacy), 'text_first_part': text_first_part, 'text': "%s / %s" % (text_first_part, place_text), 'activity_id': activity_id, diff --git a/passerelle/apps/mdel/mdel.py b/passerelle/apps/mdel/mdel.py index d6751fea..163fa1a8 100644 --- a/passerelle/apps/mdel/mdel.py +++ b/passerelle/apps/mdel/mdel.py @@ -34,7 +34,6 @@ def get_resource_base_dir(): class AttachedFile(object): - def __init__(self, code, filename, b64_content): if code not in ('JI', 'JD'): raise APIError('%s is not a valid code (JI or JD)' % code) @@ -43,22 +42,19 @@ class AttachedFile(object): self.code = code def save(self, directory): - """Writes file into directory - """ + """Writes file into directory""" path = os.path.join(directory, self.filename) default_storage.save(path, ContentFile(self.content)) class MDELBase(object): - def to_string(self): raw_string = etree.tostring(self.xml, encoding='utf-8') parsed_string = minidom.parseString(raw_string) return parsed_string.toprettyxml(indent='\t') def save(self, subfolder, filename): - """Save object as xml file - """ + """Save object as xml file""" folder = os.path.join(get_resource_base_dir(), subfolder) path = os.path.join(folder, filename) default_storage.save(path, ContentFile(self.to_string())) @@ -81,58 +77,57 @@ class 
Common(MDELBase): return '%s-%s' % (self.num, self.flow_type) def teledemarche_xml(self, num, date=None, platform_id=1): - """Returns XML Teledemarche Element - """ + """Returns XML Teledemarche Element""" if not date: date = datetime.datetime.utcnow().isoformat() else: date = dateutil_parse(date).isoformat() teledemarche = etree.Element('Teledemarche') - teledemarche.extend([ - ElementFactory('NumeroTeledemarche', text=num), - ElementFactory('Date', text=date), - ElementFactory('IdentifiantPlateforme', text=platform_id), - ]) + teledemarche.extend( + [ + ElementFactory('NumeroTeledemarche', text=num), + ElementFactory('Date', text=date), + ElementFactory('IdentifiantPlateforme', text=platform_id), + ] + ) return teledemarche def routage_xml(self, code_insee): - """Returns XML Routage Element - """ + """Returns XML Routage Element""" routage = etree.Element('Routage') donnee = etree.Element('Donnee') - donnee.extend([ - ElementFactory('Id', text='CodeINSEE'), - ElementFactory('Valeur', text=code_insee) - ]) + donnee.extend([ElementFactory('Id', text='CodeINSEE'), ElementFactory('Valeur', text=code_insee)]) routage.append(donnee) return routage def piecejointe_xml(self, attached_files=None): - """Returns XML PieceJointe Element - """ + """Returns XML PieceJointe Element""" if not attached_files: return for f in attached_files: piece_jointe = etree.Element('PieceJointe') - piece_jointe.extend([ - ElementFactory('Intitule', text=f.title), - ElementFactory('Code', text=f.code), - ElementFactory('Fichier', text=f.filename) - ]) + piece_jointe.extend( + [ + ElementFactory('Intitule', text=f.title), + ElementFactory('Code', text=f.code), + ElementFactory('Fichier', text=f.filename), + ] + ) yield piece_jointe def document_xml(self, code, form_files=None): - """Returns a XML Document Element - """ + """Returns a XML Document Element""" document = ElementFactory('Document') - document.extend([ - ElementFactory('Code', text=code), - ElementFactory('Nom', text=code), - 
ElementFactory('FichierFormulaire') - ]) + document.extend( + [ + ElementFactory('Code', text=code), + ElementFactory('Nom', text=code), + ElementFactory('FichierFormulaire'), + ] + ) if not form_files and not self.doc: form_files = [self.code + '-doc-.xml'] @@ -140,23 +135,29 @@ class Common(MDELBase): form_files = [self.doc.filename] for f in form_files: - document.find('FichierFormulaire').append( - ElementFactory('FichierDonnees', text=f) - ) + document.find('FichierFormulaire').append(ElementFactory('FichierDonnees', text=f)) return document class Message(Common): - """Class descrbing a message.xml file - """ + """Class descrbing a message.xml file""" + filename = 'message.xml' ns_prefix = 'ns2' ns_uri = 'http://finances.gouv.fr/dgme/pec/message/v1' def __init__( - self, flow_type, num, zip_code, date=None, - platform_id='1', form_files=None, attached_files=None, doc=None): + self, + flow_type, + num, + zip_code, + date=None, + platform_id='1', + form_files=None, + attached_files=None, + doc=None, + ): super(Message, self).__init__(flow_type, num, doc=doc) @@ -168,10 +169,12 @@ class Message(Common): # HEADERS header = ElementFactory('Header', namespace=self.ns_uri) routing = ElementFactory('Routing', namespace=self.ns_uri) - routing.extend([ - ElementFactory('MessageId', text=num, namespace=self.ns_uri), - ElementFactory('FlowType', text=flow_type, namespace=self.ns_uri), - ]) + routing.extend( + [ + ElementFactory('MessageId', text=num, namespace=self.ns_uri), + ElementFactory('FlowType', text=flow_type, namespace=self.ns_uri), + ] + ) header.append(routing) # BODY @@ -184,11 +187,13 @@ class Message(Common): aller.append(ElementFactory('NumeroDemarche', text='EtatCivil')) code = 'ActeEtatCivil-XML' - aller.extend([ - self.teledemarche_xml(num, date, platform_id), - self.routage_xml(zip_code), - self.document_xml(code, form_files) - ]) + aller.extend( + [ + self.teledemarche_xml(num, date, platform_id), + self.routage_xml(zip_code), + self.document_xml(code, 
form_files), + ] + ) [aller.append(f) for f in self.piecejointe_xml(attached_files) if f] @@ -201,18 +206,23 @@ class Message(Common): class Description(Common): - """Class describing a -ent-.xml file - """ + """Class describing a -ent-.xml file""" def __init__( - self, flow_type, num, zip_code, date=None, - platform_id='1', form_files=[], attached_files=[], - step=None, doc=None + self, + flow_type, + num, + zip_code, + date=None, + platform_id='1', + form_files=[], + attached_files=[], + step=None, + doc=None, ): super(Description, self).__init__( - flow_type, num, zip_code, date, - platform_id, form_files, attached_files, doc=doc + flow_type, num, zip_code, date, platform_id, form_files, attached_files, doc=doc ) self.step = step @@ -223,11 +233,13 @@ class Description(Common): root.append(ElementFactory('NumeroDemarche', text='EtatCivil')) code = 'ActeEtatCivil-XML' - root.extend([ - self.teledemarche_xml(num, date, platform_id), - self.routage_xml(zip_code), - self.document_xml(code, form_files) - ]) + root.extend( + [ + self.teledemarche_xml(num, date, platform_id), + self.routage_xml(zip_code), + self.document_xml(code, form_files), + ] + ) for f in self.piecejointe_xml(attached_files): root.append(f) @@ -242,8 +254,7 @@ class Description(Common): class Data(MDELBase): - """Class describing a -doc.xml file - """ + """Class describing a -doc.xml file""" def __init__(self, demand_id, data, step=None): self.data = self.data_validator(data) @@ -254,7 +265,8 @@ class Data(MDELBase): def filename(self): if 'AEC-LA' in self.demand_id: return '%s-doc-ActeEtatCivil-XML-1-%s.xml' % ( - self.demand_id.replace('AEC-LA', 'EtatCivil'), self.step + self.demand_id.replace('AEC-LA', 'EtatCivil'), + self.step, ) return '%s-doc-.xml' % self.demand_id @@ -262,8 +274,7 @@ class Data(MDELBase): return super(Data, self).save(folder, self.filename) def data_validator(self, data): - """Checks if required keys are present - """ + """Checks if required keys are present""" 
missing_field = set(self.required).difference(set(data.keys())) if missing_field: raise APIError('field %s is required' % missing_field.pop()) @@ -286,6 +297,7 @@ class Data(MDELBase): return root + class ILEData(Data): mapping = [ @@ -294,10 +306,7 @@ class ILEData(Data): ('prenoms', 'Inscription_Electeur_Prenoms_Prenom'), ('date_naissance', 'Inscription_Electeur_DateDeNaissance'), ('lieunaiss_localite', 'Inscription_Electeur_LieuDeNaissance_Localite'), - ( - 'lieunaiss_division_territoriale', - 'Inscription_Electeur_LieuDeNaissance_DivisionTerritoriale' - ), + ('lieunaiss_division_territoriale', 'Inscription_Electeur_LieuDeNaissance_DivisionTerritoriale'), ('lieunaiss_code_postal', 'Inscription_Electeur_LieuDeNaissance_CodePostal'), ('lieunaiss_pays_raw', 'Inscription_Electeur_LieuDeNaissance_Pays'), ('sexe_raw', 'Inscription_Electeur_Sexe'), @@ -308,50 +317,32 @@ class ILEData(Data): ('adresse_nom_voie', 'Inscription_Electeur_AdresseDeLElecteur_NomVoie'), ('adresse_code_postal', 'Inscription_Electeur_AdresseDeLElecteur_CodePostal'), ('adresse_localite', 'Inscription_Electeur_AdresseDeLElecteur_Localite'), - ( - 'adresse_division_territoriale', - 'Inscription_Electeur_AdresseDeLElecteur_DivisionTerritoriale' - ), + ('adresse_division_territoriale', 'Inscription_Electeur_AdresseDeLElecteur_DivisionTerritoriale'), ('contact_code', 'Inscription_Electeur_MethodeDeContact_CanalCode'), ('contact_uri', 'Inscription_Electeur_MethodeDeContact_URI'), ('inscription_liste_type_raw', 'Inscription_TypeDeListe'), ('inscription_localite', 'Inscription_CommuneDInscription_Localite'), - ( - 'inscription_division_territoriale', - 'Inscription_CommuneDInscription_DivisionTerritoriale'), + ('inscription_division_territoriale', 'Inscription_CommuneDInscription_DivisionTerritoriale'), ('inscription_code_insee', 'Inscription_CommuneDInscription_CodeInsee'), ('inscription_date', 'DateDInscription'), ('inscription_type', 'TypeDInscription'), ('anterieur_situation_raw', 
'SituationElectoraleAnterieure_SituationDeLElecteur'), ( 'anterieur_consulat', - 'SituationElectoraleAnterieure_ConsulatDePrecedenteInscription' - '_AmbassadeOuPosteConsulaire' - ), - ( - 'anterieur_consulat_pays_raw', - 'SituationElectoraleAnterieure_ConsulatDePrecedenteInscription_Pays' - ), - ( - 'anterieur_localite', - 'SituationElectoraleAnterieure_CommuneDePrecedenteInscription_Localite' - ), - ( - 'anterieur_ue_localite', - 'SituationElectoraleAnterieure_PaysUeDerniereInscription_Localite' + 'SituationElectoraleAnterieure_ConsulatDePrecedenteInscription' '_AmbassadeOuPosteConsulaire', ), + ('anterieur_consulat_pays_raw', 'SituationElectoraleAnterieure_ConsulatDePrecedenteInscription_Pays'), + ('anterieur_localite', 'SituationElectoraleAnterieure_CommuneDePrecedenteInscription_Localite'), + ('anterieur_ue_localite', 'SituationElectoraleAnterieure_PaysUeDerniereInscription_Localite'), ( 'anterieur_division_territoriale', - 'SituationElectoraleAnterieure_CommuneDePrecedenteInscription_DivisionTerritoriale' + 'SituationElectoraleAnterieure_CommuneDePrecedenteInscription_DivisionTerritoriale', ), ( 'anterieur_ue_division_territoriale', - 'SituationElectoraleAnterieure_PaysUeDerniereInscription_DivisionTerritoriale' + 'SituationElectoraleAnterieure_PaysUeDerniereInscription_DivisionTerritoriale', ), - ( - 'anterieur_ue_localite_pays_raw', - 'SituationElectoraleAnterieure_PaysUeDerniereInscription_Pays' - ) + ('anterieur_ue_localite_pays_raw', 'SituationElectoraleAnterieure_PaysUeDerniereInscription_Pays'), ] required = [ @@ -369,7 +360,7 @@ class ILEData(Data): 'adresse_code_postal', 'adresse_localite', 'inscription_liste_type', - 'anterieur_situation_raw' + 'anterieur_situation_raw', ] def __init__(self, demand_id, data): @@ -459,7 +450,7 @@ class AECData(Data): 'titulaire_nom', 'titulaire_pere_nom', 'titulaire_pere_prenoms', - 'titulaire_prenoms' + 'titulaire_prenoms', ] def __init__(self, demand_id, data, demand_num): diff --git 
a/passerelle/apps/mdel/migrations/0001_initial.py b/passerelle/apps/mdel/migrations/0001_initial.py index 9b94f7ea..eca068b5 100644 --- a/passerelle/apps/mdel/migrations/0001_initial.py +++ b/passerelle/apps/mdel/migrations/0001_initial.py @@ -24,12 +24,35 @@ class Migration(migrations.Migration): migrations.CreateModel( name='MDEL', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_demand_users_+', related_query_name='+', blank=True)), + ( + 'log_level', + models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_demand_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'Mes Demarches En Ligne', diff --git a/passerelle/apps/mdel/migrations/0003_auto_20170125_0450.py b/passerelle/apps/mdel/migrations/0003_auto_20170125_0450.py index 8d41c7bf..371218c2 100644 --- a/passerelle/apps/mdel/migrations/0003_auto_20170125_0450.py +++ b/passerelle/apps/mdel/migrations/0003_auto_20170125_0450.py @@ -16,13 +16,17 @@ class Migration(migrations.Migration): migrations.AddField( model_name='demand', 
name='created_at', - field=models.DateTimeField(default=datetime.datetime(2016, 12, 31, 12, 0, 0, 0, tzinfo=utc), auto_now_add=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 12, 31, 12, 0, 0, 0, tzinfo=utc), auto_now_add=True + ), preserve_default=False, ), migrations.AddField( model_name='demand', name='updated_at', - field=models.DateTimeField(default=datetime.datetime(2016, 12, 31, 12, 0, 0, 0, tzinfo=utc), auto_now=True), + field=models.DateTimeField( + default=datetime.datetime(2016, 12, 31, 12, 0, 0, 0, tzinfo=utc), auto_now=True + ), preserve_default=False, ), ] diff --git a/passerelle/apps/mdel/migrations/0006_auto_20210126_1440.py b/passerelle/apps/mdel/migrations/0006_auto_20210126_1440.py index 7cba7afa..4d8c038e 100644 --- a/passerelle/apps/mdel/migrations/0006_auto_20210126_1440.py +++ b/passerelle/apps/mdel/migrations/0006_auto_20210126_1440.py @@ -16,11 +16,15 @@ class Migration(migrations.Migration): migrations.AddField( model_name='mdel', name='incoming_sftp', - field=passerelle.utils.sftp.SFTPField(blank=True, default=None, verbose_name='SFTP server for incoming files'), + field=passerelle.utils.sftp.SFTPField( + blank=True, default=None, verbose_name='SFTP server for incoming files' + ), ), migrations.AddField( model_name='mdel', name='outcoming_sftp', - field=passerelle.utils.sftp.SFTPField(blank=True, default=None, verbose_name='SFTP server for outgoing files'), + field=passerelle.utils.sftp.SFTPField( + blank=True, default=None, verbose_name='SFTP server for outgoing files' + ), ), ] diff --git a/passerelle/apps/mdel/models.py b/passerelle/apps/mdel/models.py index dabb4589..c6f58e57 100644 --- a/passerelle/apps/mdel/models.py +++ b/passerelle/apps/mdel/models.py @@ -42,7 +42,7 @@ STATUS_MAPPING = { '17': 'information needed', '16': 'in progress', '15': 'invalid', - '14': 'imported' + '14': 'imported', } APPLICANTS = [ @@ -54,18 +54,21 @@ APPLICANTS = [ {"id": "PetitFils", "text": "Son petit-fils ou sa petite-fille"}, 
{"id": "Representant", "text": "Son représentant légal"}, {"id": "Heriter", "text": "Son héritier"}, - {"id": "Autre", "text": "Autre"}] + {"id": "Autre", "text": "Autre"}, +] CERTIFICATES = [ {'id': 'NAISSANCE', 'text': 'Acte de naissance'}, {'id': 'MARIAGE', 'text': 'Acte de mariage'}, - {'id': 'DECES', 'text': 'Acte de décès'}] + {'id': 'DECES', 'text': 'Acte de décès'}, +] CERTIFICATE_TYPES = [ {'id': 'COPIE-INTEGRALE', 'text': 'Copie intégrale'}, {'id': 'EXTRAIT-AVEC-FILIATION', 'text': 'Extrait avec filiation'}, {'id': 'EXTRAIT-SANS-FILIATION', 'text': 'Extrait sans filiation'}, - {'id': 'EXTRAIT-PLURILINGUE', 'text': 'Extrait plurilingue'}] + {'id': 'EXTRAIT-PLURILINGUE', 'text': 'Extrait plurilingue'}, +] class MDEL(BaseResource): @@ -91,8 +94,7 @@ class MDEL(BaseResource): @endpoint(perm='can_access', methods=['post']) def create(self, request, *args, **kwargs): - """Create a demand - """ + """Create a demand""" formdata = json_loads(request.body) extra = formdata.pop('extra', {}) fields = formdata.pop('fields', {}) @@ -133,8 +135,7 @@ class MDEL(BaseResource): @endpoint(perm='can_access') def status(self, request, *args, **kwargs): - """Return demand's statutes - """ + """Return demand's statutes""" demand_id = request.GET.get('demand_id', None) if not demand_id: raise APIError('demand_id is required') @@ -148,8 +149,7 @@ class MDEL(BaseResource): @endpoint(perm='can_access') def applicants(self, request, without=''): - return {'data': [item for item in APPLICANTS - if item.get('id') not in without.split(',')]} + return {'data': [item for item in APPLICANTS if item.get('id') not in without.split(',')]} @endpoint(perm='can_access') def certificates(self, request): @@ -157,18 +157,15 @@ class MDEL(BaseResource): @endpoint(name='certificate-types', perm='can_access') def certificate_types(self, request, without=''): - return {'data': [item for item in CERTIFICATE_TYPES - if item.get('id') not in without.split(',')]} + return {'data': [item for item in 
CERTIFICATE_TYPES if item.get('id') not in without.split(',')]} @property def input_dir(self): - return os.path.join(mdel.get_resource_base_dir(), - self.slug, 'inputs') + return os.path.join(mdel.get_resource_base_dir(), self.slug, 'inputs') @property def output_dir(self): - return os.path.join(mdel.get_resource_base_dir(), - self.slug, 'outputs') + return os.path.join(mdel.get_resource_base_dir(), self.slug, 'outputs') def send_demand(self, demand_id): try: @@ -176,8 +173,7 @@ class MDEL(BaseResource): except Demand.DoesNotExist: return with self.outcoming_sftp.client() as client: - client.put(demand.filepath, - os.path.join(client.getcwd(), demand.filename)) + client.put(demand.filepath, os.path.join(client.getcwd(), demand.filename)) def get_response_files(self): if not os.path.exists(self.output_dir): @@ -201,8 +197,7 @@ class Demand(models.Model): created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) resource = models.ForeignKey(MDEL, on_delete=models.CASCADE) - num = models.CharField( - max_length=64, null=False, primary_key=True, unique=True) + num = models.CharField(max_length=64, null=False, primary_key=True, unique=True) flow_type = models.CharField(max_length=32, null=False) status = models.CharField(max_length=32, null=True) step = models.IntegerField(default=0) @@ -212,7 +207,7 @@ class Demand(models.Model): return '%s - %s - %s' % (self.resource.slug, self.demand_id, self.status) class Meta: - unique_together = (('num', 'flow_type')) + unique_together = ('num', 'flow_type') def save(self, *args, **kwargs): self.demand_id = '%s-%s' % (self.num, self.flow_type) @@ -251,8 +246,14 @@ class Demand(models.Model): for proof_code, proof_attribute in proofs: - documents = [value for key, value in formdata.items() - if key.startswith(proof_attribute) and isinstance(value, dict) and 'filename' in value and 'content' in value] + documents = [ + value + for key, value in formdata.items() + if 
key.startswith(proof_attribute) + and isinstance(value, dict) + and 'filename' in value + and 'content' in value + ] if not documents: raise APIError('%s and all its attributes are required' % proof_attribute) for document in documents: @@ -325,8 +326,9 @@ class Demand(models.Model): demandeur_adresse_pays_raw = formdata.get('demandeur_adresse_pays_raw') demandeur_adresse_etrangere = formdata.get('demandeur_adresse_etrangere') demandeur_adresse_etrangere_pays_raw = formdata.get('demandeur_adresse_etrangere_pays_raw') - if (demandeur_adresse_etrangere_pays_raw - or (demandeur_adresse_pays_raw and demandeur_adresse_pays_raw != 'FRA')): + if demandeur_adresse_etrangere_pays_raw or ( + demandeur_adresse_pays_raw and demandeur_adresse_pays_raw != 'FRA' + ): formdata.pop('demandeur_adresse_pays_raw', None) if not demandeur_adresse_etrangere_pays_raw: formdata['demandeur_adresse_etrangere_pays_raw'] = demandeur_adresse_pays_raw @@ -368,16 +370,19 @@ class Demand(models.Model): submission_date = formdata.get('receipt_time', None) - message = mdel.Message( - flow_type, demand_num, code_insee, date=submission_date, - doc=doc - ) + message = mdel.Message(flow_type, demand_num, code_insee, date=submission_date, doc=doc) message.save(inputs_dir) input_files['message'] = message.filename - description = mdel.Description(flow_type, demand_num, code_insee, date=submission_date, - attached_files=attached_files, - step=self.step, doc=doc) + description = mdel.Description( + flow_type, + demand_num, + code_insee, + date=submission_date, + attached_files=attached_files, + step=self.step, + doc=doc, + ) description.save(inputs_dir) input_files['enveloppe'] = description.filename @@ -385,14 +390,9 @@ class Demand(models.Model): return zipdir(inputs_dir, input_files) def get_status(self): - """Read demand' statuses from file - """ + """Read demand' statuses from file""" - result = { - 'closed': False, - 'status': None, - 'comment': '' - } + result = {'closed': False, 'status': None, 
'comment': ''} namespace = {'ns2': 'http://finances.gouv.fr/dgme/pec/message/v1'} @@ -430,10 +430,7 @@ class Demand(models.Model): commentaire = maj.findtext('ns2:Commentaire', namespaces=namespace) if etat: - statuses.append({ - 'etat': etat, - 'commentaire': commentaire - }) + statuses.append({'etat': etat, 'commentaire': commentaire}) statuses = sorted(statuses, key=lambda x: int(x['etat']))[-2:] diff --git a/passerelle/apps/mdel/utils.py b/passerelle/apps/mdel/utils.py index 2f860035..093769d5 100644 --- a/passerelle/apps/mdel/utils.py +++ b/passerelle/apps/mdel/utils.py @@ -22,25 +22,23 @@ from django.utils.dateparse import parse_date as django_parse_date from passerelle.utils.jsonresponse import APIError + def parse_date(date): try: parsed_date = django_parse_date(date) except ValueError as e: - raise APIError('Invalid date: %r (%r)' % ( date, e)) + raise APIError('Invalid date: %r (%r)' % (date, e)) if not parsed_date: raise APIError('date %r not iso-formated' % date) return parsed_date.isoformat() class ElementFactory(etree.Element): - def __init__(self, *args, **kwargs): self.text = kwargs.pop('text', None) namespace = kwargs.pop('namespace', None) if namespace: - super(ElementFactory, self).__init__( - etree.QName(namespace, args[0]), **kwargs - ) + super(ElementFactory, self).__init__(etree.QName(namespace, args[0]), **kwargs) self.namespace = namespace else: super(ElementFactory, self).__init__(*args, **kwargs) @@ -65,8 +63,7 @@ class ElementFactory(etree.Element): def zipdir(path, input_files): - """Zip directory - """ + """Zip directory""" basefiles = [] for key in ('message', 'enveloppe', 'demande'): if key in input_files: @@ -88,7 +85,6 @@ def zipdir(path, input_files): def get_file_content_from_zip(path, filename): - """Rreturn file content - """ + """Rreturn file content""" with zipfile.ZipFile(path, 'r') as zipf: return zipf.read(filename) diff --git a/passerelle/apps/mdel_ddpacs/abstract.py b/passerelle/apps/mdel_ddpacs/abstract.py index 
9f1629b5..841bb78d 100644 --- a/passerelle/apps/mdel_ddpacs/abstract.py +++ b/passerelle/apps/mdel_ddpacs/abstract.py @@ -44,15 +44,18 @@ from passerelle.utils import xml, sftp MDELStatus = namedtuple('MDELStatus', ['code', 'slug', 'label']) -MDEL_STATUSES = map(lambda t: MDELStatus(*t), [ - ('100', 'closed', _('closed')), - ('20', 'rejected', _('rejected')), - ('19', 'accepted', _('accepted')), - ('17', 'information needed', _('information needed')), - ('16', 'in progress', _('in progress')), - ('15', 'invalid', _('invalid')), - ('14', 'imported', _('imported')), -]) +MDEL_STATUSES = map( + lambda t: MDELStatus(*t), + [ + ('100', 'closed', _('closed')), + ('20', 'rejected', _('rejected')), + ('19', 'accepted', _('accepted')), + ('17', 'information needed', _('information needed')), + ('16', 'in progress', _('in progress')), + ('15', 'invalid', _('invalid')), + ('14', 'imported', _('imported')), + ], +) MDEL_STATUSES_BY_CODE = {mdel_status.code: mdel_status for mdel_status in MDEL_STATUSES} @@ -68,18 +71,10 @@ class Resource(BaseResource): blank=True, help_text=_('MDEL response .zip will be pulled from.'), ) - recipient_siret = models.CharField( - verbose_name=_('SIRET'), - max_length=128) - recipient_service = models.CharField( - verbose_name=_('Service'), - max_length=128) - recipient_guichet = models.CharField( - verbose_name=_('Guichet'), - max_length=128) - code_insee = models.CharField( - verbose_name=_('INSEE Code'), - max_length=6) + recipient_siret = models.CharField(verbose_name=_('SIRET'), max_length=128) + recipient_service = models.CharField(verbose_name=_('Service'), max_length=128) + recipient_guichet = models.CharField(verbose_name=_('Guichet'), max_length=128) + code_insee = models.CharField(verbose_name=_('INSEE Code'), max_length=6) xsd_path = 'schema.xsd' xsd_root_element = None @@ -116,11 +111,13 @@ class Resource(BaseResource): base_schema = cls.get_doc_json_schema() base_schema['unflatten'] = True base_schema['merge_extra'] = True - 
base_schema['properties'].update({ - 'display_id': {'type': 'string'}, - 'email': {'type': 'string'}, - 'code_insee': {'type': 'string'}, - }) + base_schema['properties'].update( + { + 'display_id': {'type': 'string'}, + 'email': {'type': 'string'}, + 'code_insee': {'type': 'string'}, + } + ) base_schema.setdefault('required', []).append('display_id') if hasattr(cls, 'pre_process_create'): base_schema['pre_process'] = cls.pre_process_create @@ -130,13 +127,11 @@ class Resource(BaseResource): reference = 'A-' + payload['display_id'] with transaction.atomic(): try: - demand = self.demand_set.create( - reference=reference, - step=1, - data=payload) + demand = self.demand_set.create(reference=reference, step=1, data=payload) except IntegrityError as e: - raise APIError('reference-non-unique', http_status=400, - data={'original_exc': exception_to_text(e)}) + raise APIError( + 'reference-non-unique', http_status=400, data={'original_exc': exception_to_text(e)} + ) self.add_job('push_demand', demand_id=demand.id) return self.status(request, demand) @@ -145,10 +140,9 @@ class Resource(BaseResource): if not demand.push(): raise SkipJob(after_timestamp=3600 * 6) - @endpoint(perm='can_access', - methods=['get'], - description=_('Demand status'), - pattern=r'(?P\d+)/$') + @endpoint( + perm='can_access', methods=['get'], description=_('Demand status'), pattern=r'(?P\d+)/$' + ) def demand(self, request, demand_id): try: demand = self.demand_set.get(id=demand_id) @@ -164,10 +158,12 @@ class Resource(BaseResource): 'zip_url': request.build_absolute_uri(demand.zip_url), } - @endpoint(perm='can_access', - methods=['get'], - description=_('Demand document'), - pattern=r'(?P\d+)/.*$') + @endpoint( + perm='can_access', + methods=['get'], + description=_('Demand document'), + pattern=r'(?P\d+)/.*$', + ) def document(self, request, demand_id): try: demand = self.demand_set.get(id=demand_id) @@ -179,9 +175,7 @@ class Resource(BaseResource): @property def response_re(self): - return 
re.compile( - r'(?P[^-]+-[^-]+-[^-]+)-%s-' - r'(?P\d+).zip' % self.flow_type) + return re.compile(r'(?P[^-]+-[^-]+-[^-]+)-%s-' r'(?P\d+).zip' % self.flow_type) def hourly(self): '''Get responses''' @@ -194,7 +188,9 @@ class Resource(BaseResource): if not m: self.logger.warning( 'pull responses: unexpected file "%s" in sftp, file name does not match pattern %s', - name, self.response_re) + name, + self.response_re, + ) continue reference = m.groupdict()['reference'] step = int(m.groupdict()['step']) @@ -203,14 +199,16 @@ class Resource(BaseResource): self.logger.error( 'pull responses: unexpected file "%s" in sftp, no demand for reference "%s"', name, - reference) + reference, + ) continue if step < demand.step: demand.logger.error( 'pull responses: unexpected file "%s" in sftp: step %s is inferior to demand step %s', name, step, - demand.step) + demand.step, + ) continue demand.handle_response(sftp_client=client, filename=name, step=step) except sftp.paramiko.SSHException as e: @@ -242,9 +240,8 @@ class Demand(models.Model): @functional.cached_property def logger(self): return self.resource.logger.context( - demand_id=self.id, - demand_status=self.status, - demand_reference=self.reference) + demand_id=self.id, demand_status=self.status, demand_reference=self.reference + ) def push(self): if not self.resource.outgoing_sftp: @@ -254,14 +251,10 @@ class Demand(models.Model): with client.open(self.zip_name, mode='w') as fd: fd.write(self.zip_content) except sftp.paramiko.SSHException as e: - self.logger.error('push demand: %s failed, "%s"', - self, - exception_to_text(e)) + self.logger.error('push demand: %s failed, "%s"', self, exception_to_text(e)) self.status = self.STATUS_ERROR except Exception as e: - self.logger.exception('push demand: %s failed, "%s"', - self, - exception_to_text(e)) + self.logger.exception('push demand: %s failed, "%s"', self, exception_to_text(e)) self.status = self.STATUS_ERROR else: self.resource.logger.info('push demand: %s success', self) 
@@ -271,20 +264,23 @@ class Demand(models.Model): @functional.cached_property def zip_template(self): - return ZipTemplate(self.resource.zip_manifest, ctx={ - 'reference': self.reference, - 'flow_type': self.resource.flow_type, - 'doc_type': self.resource.doc_type, - 'step': '1', # We never create more than one document for a reference - 'siret': self.resource.recipient_siret, - 'service': self.resource.recipient_service, - 'guichet': self.resource.recipient_guichet, - 'code_insee': self.data.get('code_insee', self.resource.code_insee), - 'document': self.document, - 'code_insee_id': self.resource.code_insee_id, - 'date': self.created_at.isoformat(), - 'email': self.data.get('email', ''), - }) + return ZipTemplate( + self.resource.zip_manifest, + ctx={ + 'reference': self.reference, + 'flow_type': self.resource.flow_type, + 'doc_type': self.resource.doc_type, + 'step': '1', # We never create more than one document for a reference + 'siret': self.resource.recipient_siret, + 'service': self.resource.recipient_service, + 'guichet': self.resource.recipient_guichet, + 'code_insee': self.data.get('code_insee', self.resource.code_insee), + 'document': self.document, + 'code_insee_id': self.resource.code_insee_id, + 'date': self.created_at.isoformat(), + 'email': self.data.get('email', ''), + }, + ) @property def zip_name(self): @@ -310,7 +306,8 @@ class Demand(models.Model): 'slug': self.resource.slug, 'endpoint': 'demand', 'rest': '%s/' % self.id, - }) + }, + ) @property def zip_url(self): @@ -320,8 +317,9 @@ class Demand(models.Model): 'connector': self.resource.get_connector_slug(), 'slug': self.resource.slug, 'endpoint': 'document', - 'rest': '%s/%s' % (self.id, self.zip_name) - }) + 'rest': '%s/%s' % (self.id, self.zip_name), + }, + ) def handle_response(self, sftp_client, filename, step): try: @@ -332,18 +330,13 @@ class Demand(models.Model): ns = 'http://finances.gouv.fr/dgme/pec/message/v1' etat_node = tree.find('.//{%s}Etat' % ns) if etat_node is None: - 
self.logger.error( - 'pull responses: missing Etat node in "%s"', - filename) + self.logger.error('pull responses: missing Etat node in "%s"', filename) return etat = etat_node.text if etat in MDEL_STATUSES_BY_CODE: self.status = MDEL_STATUSES_BY_CODE[etat].slug else: - self.logger.error( - 'pull responses: unknown etat in "%s", etat="%s"', - filename, - etat) + self.logger.error('pull responses: unknown etat in "%s", etat="%s"', filename, etat) return commentaire_node = tree.find('.//{%s}Etat' % ns) if commentaire_node is not None: @@ -353,27 +346,21 @@ class Demand(models.Model): self.data['commentaire'] = commentaire self.step = step + 1 self.save() - self.logger.info('pull responses: status of demand %s changed to %s', - self, self.status) + self.logger.info('pull responses: status of demand %s changed to %s', self, self.status) except sftp.paramiko.SSHException as e: self.logger.error( - 'pull responses: failed to read response "%s", %s', - filename, - exception_to_text(e)) + 'pull responses: failed to read response "%s", %s', filename, exception_to_text(e) + ) else: try: sftp_client.remove(filename) except sftp.paramiko.SSHException as e: self.logger.error( - 'pull responses: failed to remove response "%s", %s', - filename, - exception_to_text(e)) + 'pull responses: failed to remove response "%s", %s', filename, exception_to_text(e) + ) def __str__(self): - return '' % ( - self.id, - self.reference, - self.resource.flow_type) + return '' % (self.id, self.reference, self.resource.flow_type) class Meta: abstract = True diff --git a/passerelle/apps/mdel_ddpacs/migrations/0001_initial.py b/passerelle/apps/mdel_ddpacs/migrations/0001_initial.py index 0c557f0d..535a0df7 100644 --- a/passerelle/apps/mdel_ddpacs/migrations/0001_initial.py +++ b/passerelle/apps/mdel_ddpacs/migrations/0001_initial.py @@ -20,11 +20,33 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Demand', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('reference', models.CharField(max_length=32, unique=True)), - ('status', models.CharField(choices=[('pending', 'pending'), ('pushed', 'pushed'), ('error', 'error'), ('closed', 'closed'), ('rejected', 'rejected'), ('accepted', 'accepted'), ('information needed', 'information needed'), ('in progress', 'in progress'), ('invalid', 'invalid'), ('imported', 'imported')], default='pending', max_length=32, null=True)), + ( + 'status', + models.CharField( + choices=[ + ('pending', 'pending'), + ('pushed', 'pushed'), + ('error', 'error'), + ('closed', 'closed'), + ('rejected', 'rejected'), + ('accepted', 'accepted'), + ('information needed', 'information needed'), + ('in progress', 'in progress'), + ('invalid', 'invalid'), + ('imported', 'imported'), + ], + default='pending', + max_length=32, + null=True, + ), + ), ('step', models.IntegerField(default=0)), ('data', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), ], @@ -35,17 +57,34 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Resource', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('outgoing_sftp', passerelle.utils.sftp.SFTPField(blank=True, default=None, verbose_name='Outcoming SFTP')), - ('incoming_sftp', passerelle.utils.sftp.SFTPField(blank=True, default=None, verbose_name='Incoming SFTP')), + ( + 'outgoing_sftp', + passerelle.utils.sftp.SFTPField(blank=True, 
default=None, verbose_name='Outcoming SFTP'), + ), + ( + 'incoming_sftp', + passerelle.utils.sftp.SFTPField(blank=True, default=None, verbose_name='Incoming SFTP'), + ), ('recipient_siret', models.CharField(max_length=128, verbose_name='SIRET')), ('recipient_service', models.CharField(max_length=128, verbose_name='Service')), ('recipient_guichet', models.CharField(max_length=128, verbose_name='Guichet')), ('code_insee', models.CharField(max_length=6, verbose_name='INSEE Code')), - ('users', models.ManyToManyField(blank=True, related_name='_resource_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_resource_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'MDEL compatible DDPACS request builder', diff --git a/passerelle/apps/mdel_ddpacs/migrations/0002_auto_20200504_1402.py b/passerelle/apps/mdel_ddpacs/migrations/0002_auto_20200504_1402.py index 82ca99eb..e263116b 100644 --- a/passerelle/apps/mdel_ddpacs/migrations/0002_auto_20200504_1402.py +++ b/passerelle/apps/mdel_ddpacs/migrations/0002_auto_20200504_1402.py @@ -26,16 +26,31 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='demand', name='status', - field=models.CharField(choices=[('pending', 'pending'), ('pushed', 'pushed'), ('error', 'error')], default='pending', max_length=32, null=True), + field=models.CharField( + choices=[('pending', 'pending'), ('pushed', 'pushed'), ('error', 'error')], + default='pending', + max_length=32, + null=True, + ), ), migrations.AlterField( model_name='resource', name='incoming_sftp', - field=passerelle.utils.sftp.SFTPField(blank=True, default=None, help_text='MDEL response .zip will be pulled from.', verbose_name='Incoming SFTP'), + field=passerelle.utils.sftp.SFTPField( + blank=True, + default=None, + help_text='MDEL response .zip will be pulled from.', + verbose_name='Incoming SFTP', + ), ), migrations.AlterField( 
model_name='resource', name='outgoing_sftp', - field=passerelle.utils.sftp.SFTPField(blank=True, default=None, help_text='MDEL request .zip will be pushed to.', verbose_name='Outgoing SFTP'), + field=passerelle.utils.sftp.SFTPField( + blank=True, + default=None, + help_text='MDEL request .zip will be pushed to.', + verbose_name='Outgoing SFTP', + ), ), ] diff --git a/passerelle/apps/mdel_ddpacs/models.py b/passerelle/apps/mdel_ddpacs/models.py index f68a9e4c..e8d0e210 100644 --- a/passerelle/apps/mdel_ddpacs/models.py +++ b/passerelle/apps/mdel_ddpacs/models.py @@ -50,13 +50,11 @@ class DDPACSSchema(JSONSchemaFromXMLSchema): def schema_double(cls): return { 'anyOf': [ - { - 'type': 'number' - }, + {'type': 'number'}, { 'type': 'string', 'pattern': r'[0-9]*(\.[0-9]*)?', - } + }, ] } @@ -69,7 +67,9 @@ class DDPACSSchema(JSONSchemaFromXMLSchema): def decode_civilite(self, data): for key, value in self.civilite_map.items(): if data.text == value: - return xmlschema.ElementData(tag=data.tag, text=key, content=data.content, attributes=data.attributes) + return xmlschema.ElementData( + tag=data.tag, text=key, content=data.content, attributes=data.attributes + ) raise xmlschema.XMLSchemaValidationError(self, data, reason='civilite invalide %s') def decode_double(self, data): @@ -92,16 +92,12 @@ class Resource(abstract.Resource): class Meta: verbose_name = _('PACS request (MDEL DDPACS)') - @endpoint(perm='can_access', - methods=['post'], - description=_('Create request'), - post={ - 'request_body': { - 'schema': { - 'application/json': None - } - } - }) + @endpoint( + perm='can_access', + methods=['post'], + description=_('Create request'), + post={'request_body': {'schema': {'application/json': None}}}, + ) def create(self, request, post_data): return self._handle_create(request, post_data) @@ -129,9 +125,13 @@ class Resource(abstract.Resource): d[key] = int(d[key]) for key in d: helper(d[key]) + helper(data) 
-Resource.create.endpoint_info.post['request_body']['schema']['application/json'] = Resource.get_create_schema() + +Resource.create.endpoint_info.post['request_body']['schema'][ + 'application/json' +] = Resource.get_create_schema() class Demand(abstract.Demand): diff --git a/passerelle/apps/mdel_ddpacs/utils.py b/passerelle/apps/mdel_ddpacs/utils.py index 4fc5589e..cb3a4612 100644 --- a/passerelle/apps/mdel_ddpacs/utils.py +++ b/passerelle/apps/mdel_ddpacs/utils.py @@ -22,25 +22,23 @@ from django.utils.dateparse import parse_date as django_parse_date from passerelle.utils.jsonresponse import APIError + def parse_date(date): try: parsed_date = django_parse_date(date) except ValueError as e: - raise APIError('Invalid date: %r (%r)' % ( date, e)) + raise APIError('Invalid date: %r (%r)' % (date, e)) if not parsed_date: raise APIError('date %r not iso-formated' % date) return parsed_date.isoformat() class ElementFactory(etree.Element): - def __init__(self, *args, **kwargs): self.text = kwargs.pop('text', None) namespace = kwargs.pop('namespace', None) if namespace: - super(ElementFactory, self).__init__( - etree.QName(namespace, args[0]), **kwargs - ) + super(ElementFactory, self).__init__(etree.QName(namespace, args[0]), **kwargs) self.namespace = namespace else: super(ElementFactory, self).__init__(*args, **kwargs) @@ -65,8 +63,7 @@ class ElementFactory(etree.Element): def zipdir(path): - """Zip directory - """ + """Zip directory""" archname = path + '.zip' with zipfile.ZipFile(archname, 'w', zipfile.ZIP_DEFLATED) as zipf: for root, dirs, files in os.walk(path): @@ -77,7 +74,6 @@ def zipdir(path): def get_file_content_from_zip(path, filename): - """Rreturn file content - """ + """Rreturn file content""" with zipfile.ZipFile(path, 'r') as zipf: return zipf.read(filename) diff --git a/passerelle/apps/mobyt/admin.py b/passerelle/apps/mobyt/admin.py index 4f82a8ae..b0452e81 100644 --- a/passerelle/apps/mobyt/admin.py +++ b/passerelle/apps/mobyt/admin.py @@ -2,9 +2,10 @@ 
from django.contrib import admin from .models import MobytSMSGateway + class MobytSMSGatewayAdmin(admin.ModelAdmin): prepopulated_fields = {'slug': ('title',)} - list_display = ['title', 'slug', 'description', 'username', 'password', - 'quality', 'default_country_code'] + list_display = ['title', 'slug', 'description', 'username', 'password', 'quality', 'default_country_code'] + admin.site.register(MobytSMSGateway, MobytSMSGatewayAdmin) diff --git a/passerelle/apps/mobyt/migrations/0001_initial.py b/passerelle/apps/mobyt/migrations/0001_initial.py index b3ececbb..1ce4cfd9 100644 --- a/passerelle/apps/mobyt/migrations/0001_initial.py +++ b/passerelle/apps/mobyt/migrations/0001_initial.py @@ -14,15 +14,37 @@ class Migration(migrations.Migration): migrations.CreateModel( name='MobytSMSGateway', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), ('username', models.CharField(max_length=64, verbose_name='Username')), ('password', models.CharField(max_length=64, verbose_name='Password')), - ('quality', models.CharField(choices=[(b'l', 'sms direct'), (b'll', 'sms low-cost'), (b'n', 'sms top')], default=b'l', max_length=4, verbose_name='Message quality')), - ('default_country_code', models.CharField(default='33', max_length=3, verbose_name='Default country code')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_mobytsmsgateway_users_+', related_query_name='+', blank=True)), + ( + 'quality', + models.CharField( + choices=[(b'l', 'sms direct'), (b'll', 'sms low-cost'), (b'n', 'sms top')], + default=b'l', + max_length=4, + verbose_name='Message quality', + ), + ), + ( + 'default_country_code', + 
models.CharField(default='33', max_length=3, verbose_name='Default country code'), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_mobytsmsgateway_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'db_table': 'sms_mobyt', diff --git a/passerelle/apps/mobyt/migrations/0002_mobytsmsgateway_log_level.py b/passerelle/apps/mobyt/migrations/0002_mobytsmsgateway_log_level.py index 9ae5cbc3..7d59cf79 100644 --- a/passerelle/apps/mobyt/migrations/0002_mobytsmsgateway_log_level.py +++ b/passerelle/apps/mobyt/migrations/0002_mobytsmsgateway_log_level.py @@ -14,7 +14,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='mobytsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/mobyt/migrations/0003_auto_20160316_0910.py b/passerelle/apps/mobyt/migrations/0003_auto_20160316_0910.py index cd84fbac..e61f314f 100644 --- a/passerelle/apps/mobyt/migrations/0003_auto_20160316_0910.py +++ b/passerelle/apps/mobyt/migrations/0003_auto_20160316_0910.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='mobytsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/mobyt/migrations/0004_auto_20160407_0456.py b/passerelle/apps/mobyt/migrations/0004_auto_20160407_0456.py index f488e0de..6c895f53 100644 
--- a/passerelle/apps/mobyt/migrations/0004_auto_20160407_0456.py +++ b/passerelle/apps/mobyt/migrations/0004_auto_20160407_0456.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='mobytsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/mobyt/migrations/0008_auto_20200310_1539.py b/passerelle/apps/mobyt/migrations/0008_auto_20200310_1539.py index 92052fee..3a257ef3 100644 --- a/passerelle/apps/mobyt/migrations/0008_auto_20200310_1539.py +++ b/passerelle/apps/mobyt/migrations/0008_auto_20200310_1539.py @@ -20,6 +20,11 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='mobytsmsgateway', name='quality', - field=models.CharField(choices=[('l', 'sms direct'), ('ll', 'sms low-cost'), ('n', 'sms top')], default='l', max_length=4, verbose_name='Message quality'), + field=models.CharField( + choices=[('l', 'sms direct'), ('ll', 'sms low-cost'), ('n', 'sms top')], + default='l', + max_length=4, + verbose_name='Message quality', + ), ), ] diff --git a/passerelle/apps/mobyt/models.py b/passerelle/apps/mobyt/models.py index 70dc072e..33c461c2 100644 --- a/passerelle/apps/mobyt/models.py +++ b/passerelle/apps/mobyt/models.py @@ -16,8 +16,9 @@ class MobytSMSGateway(SMSResource): ) username = models.CharField(verbose_name=_('Username'), max_length=64) password = models.CharField(verbose_name=_('Password'), max_length=64) - quality = models.CharField(max_length=4, 
choices=MESSAGES_QUALITIES, default='l', - verbose_name=_('Message quality')) + quality = models.CharField( + max_length=4, choices=MESSAGES_QUALITIES, default='l', verbose_name=_('Message quality') + ) TEST_DEFAULTS = { 'create_kwargs': { @@ -30,11 +31,9 @@ class MobytSMSGateway(SMSResource): 'result': { 'err': 1, 'err_desc': 'MobyT error: response is not "OK"', - } + }, } - ], - } class Meta: diff --git a/passerelle/apps/okina/migrations/0001_initial.py b/passerelle/apps/okina/migrations/0001_initial.py index f75d0957..a4d924c5 100644 --- a/passerelle/apps/okina/migrations/0001_initial.py +++ b/passerelle/apps/okina/migrations/0001_initial.py @@ -14,15 +14,43 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Okina', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('service_url', models.URLField(help_text='Okina API base URL', max_length=256, verbose_name='Service URL')), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'service_url', + models.URLField( + help_text='Okina API base URL', max_length=256, verbose_name='Service URL' + ), + ), ('username', models.CharField(max_length=128, 
verbose_name='Username')), ('password', models.CharField(max_length=128, verbose_name='Password')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_okina_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_okina_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'Okina', diff --git a/passerelle/apps/okina/models.py b/passerelle/apps/okina/models.py index 079c16df..f94f9fb5 100644 --- a/passerelle/apps/okina/models.py +++ b/passerelle/apps/okina/models.py @@ -28,8 +28,9 @@ from passerelle.utils.jsonresponse import APIError class Okina(BaseResource): - service_url = models.URLField(max_length=256, blank=False, verbose_name=_('Service URL'), - help_text=_('Okina API base URL')) + service_url = models.URLField( + max_length=256, blank=False, verbose_name=_('Service URL'), help_text=_('Okina API base URL') + ) username = models.CharField(max_length=128, blank=False, verbose_name=_('Username')) password = models.CharField(max_length=128, blank=False, verbose_name=_('Password')) @@ -93,10 +94,9 @@ class Okina(BaseResource): @endpoint() def classes(self, request): - return {'data': [{ - 'id': '%s' % item['id'], - 'text': item['label'] - } for item in self.request('classes')]} + return { + 'data': [{'id': '%s' % item['id'], 'text': item['label']} for item in self.request('classes')] + } def get_institutions(self, query=''): okina_institutions = self.request('institutions' + query) @@ -112,9 +112,11 @@ class Okina(BaseResource): institutions.sort(key=lambda x: x['text']) return institutions - @endpoint(parameters={ + @endpoint( + parameters={ 'insee': {'description': _('INSEE City code'), 'example_value': '36005'}, - }) + } + ) def institutions(self, request, insee=None): if insee: query = '?' 
+ urlencode({'inseeCode': insee}) @@ -127,22 +129,24 @@ class Okina(BaseResource): return {'data': self.get_institutions('/subscriberCity/%s' % city_insee_code)} @endpoint( - name='search', perm='can_access', - description=_('Get stop points based on a starting position and an arrival institution (API 2020)'), - parameters={ - 'lat': {'description': _('Latitude (departure)'), 'example_value': '46.828652'}, - 'lon': {'description': _('Longitude (departure)'), 'example_value': '1.701463'}, - 'address': {'description': _('Address (departure)')}, - 'institution': {'description': _('Institution ID (arrival)'), 'example_value': '277'}, - 'mode': {'description': _('Search mode: CLOSE_SCHOLAR (default) = 3km, FAR_ALL = 15km')}, - }) + name='search', + perm='can_access', + description=_('Get stop points based on a starting position and an arrival institution (API 2020)'), + parameters={ + 'lat': {'description': _('Latitude (departure)'), 'example_value': '46.828652'}, + 'lon': {'description': _('Longitude (departure)'), 'example_value': '1.701463'}, + 'address': {'description': _('Address (departure)')}, + 'institution': {'description': _('Institution ID (arrival)'), 'example_value': '277'}, + 'mode': {'description': _('Search mode: CLOSE_SCHOLAR (default) = 3km, FAR_ALL = 15km')}, + }, + ) def search(self, request, lat, lon, institution, address='', mode='CLOSE_SCHOLAR'): payload = { - 'from-lat': lat.replace(',', '.'), - 'from-long': lon.replace(',', '.'), - 'from-address': address, - 'institution-id': institution, - 'type': mode, + 'from-lat': lat.replace(',', '.'), + 'from-long': lon.replace(',', '.'), + 'from-address': address, + 'institution-id': institution, + 'type': mode, } stops = self.request('wishes/search', payload) for stop in stops: @@ -152,11 +156,14 @@ class Okina(BaseResource): stop['lon'] = stop.get('longitude') return {'data': stops} - @endpoint(name='stop-areas', - pattern='^from-city/(?P\d+)/to-institution/(?P\d+)/*$') + @endpoint( + name='stop-areas', 
+ pattern='^from-city/(?P\d+)/to-institution/(?P\d+)/*$', + ) def stop_areas(self, request, city_insee_code, institution_id): - stops = self.request('stop-areas/subscriberCity/%s/institution/%s' % (city_insee_code, - institution_id)) + stops = self.request( + 'stop-areas/subscriberCity/%s/institution/%s' % (city_insee_code, institution_id) + ) for stop in stops: stop['id'] = '%s' % stop['id'] stop['text'] = stop['commercial_name'] @@ -174,27 +181,29 @@ class Okina(BaseResource): text = journey['okinaVehicleJourney']['okinaJourneyPattern']['publishedName'] else: text = identifier - ods.append({ - 'id': '%s-%s-%s' % (base_id, journey['id'], - journey['okinaVehicleJourney']['id']), - 'text': text, - 'vehicle_journey_id': '%s' % journey['okinaVehicleJourney']['id'], - 'object_id': journey['okinaVehicleJourney']['objectId'], - 'identifier': identifier - }) + ods.append( + { + 'id': '%s-%s-%s' % (base_id, journey['id'], journey['okinaVehicleJourney']['id']), + 'text': text, + 'vehicle_journey_id': '%s' % journey['okinaVehicleJourney']['id'], + 'object_id': journey['okinaVehicleJourney']['objectId'], + 'identifier': identifier, + } + ) return {'data': ods} @endpoint(name='origin-destinations') def origin_destinations(self, request): return self.get_ods() - @endpoint(name='origin-destinations', - pattern='^to-institution/(?P\d+)/*$') + @endpoint(name='origin-destinations', pattern='^to-institution/(?P\d+)/*$') def origin_destinations_to_institution(self, request, institution_id): return self.get_ods('/institution/%s' % institution_id) - @endpoint(name='origin-destinations', - pattern='^from-stop-area/(?P\d+)/to-institution/(?P\d+)/*$') + @endpoint( + name='origin-destinations', + pattern='^from-stop-area/(?P\d+)/to-institution/(?P\d+)/*$', + ) def origin_destinations_from_stop_to_institution(self, request, stop_area_id, institution_id): endpoint = 'ods/institution/%s/stop-area/%s' % (institution_id, stop_area_id) okina_journeys = self.request(endpoint) @@ -203,30 +212,35 
@@ class Okina(BaseResource): journey = { 'id': str(n), 'text': ' + '.join(line['name'] for line in okina_journey), - 'lines': [{ + 'lines': [ + { 'id': str(line['id']), 'text': line['name'], - } for line in okina_journey], + } + for line in okina_journey + ], } journeys.append(journey) return {'data': journeys} - @endpoint(name='origin-destinations', - pattern='^from-city/(?P\d+)/to-institution/(?P\d+)/*$') + @endpoint( + name='origin-destinations', + pattern='^from-city/(?P\d+)/to-institution/(?P\d+)/*$', + ) def origin_destinations_from_city_to_institution(self, request, city_insee_code, institution_id): return self.get_ods('/institution/%s/subscriberCity/%s' % (institution_id, city_insee_code)) - @endpoint(name='origin-destinations', - pattern='^from-city/(?P\d+)/*$') + @endpoint(name='origin-destinations', pattern='^from-city/(?P\d+)/*$') def origin_destinations_from_city(self, request, city_insee_code): return self.get_ods('/subscriberCity/%s' % city_insee_code) @endpoint(name='topology', pattern='^(?P(lines|networks|vehicle-journeys))/*$') def topology(self, request, kind): - return {'data': [{ - 'id': '%s' % item['id'], - 'text': item['name'] - } for item in self.request('topology/%s' % kind)]} + return { + 'data': [ + {'id': '%s' % item['id'], 'text': item['name']} for item in self.request('topology/%s' % kind) + ] + } @endpoint(name='subscriber', methods=['post'], perm='can_access') def create_subscriber(self, request): @@ -238,21 +252,17 @@ class Okina(BaseResource): raise APIError('payload must be a dict', http_status=400) return {'data': self.request('subscribers', payload, result_is_list=False)} - @endpoint(name='subscriber', pattern='^(?P\d+)/*$', - methods=['get'], perm='can_access') + @endpoint(name='subscriber', pattern='^(?P\d+)/*$', methods=['get'], perm='can_access') def get_subscriber(self, request, subscriber_id): return {'data': self.request('subscribers/%s' % subscriber_id, result_is_list=False)} - @endpoint(name='subscriber', 
pattern='^(?P\d+)/qrcode/*$', - perm='can_access') + @endpoint(name='subscriber', pattern='^(?P\d+)/qrcode/*$', perm='can_access') def get_subscriber_qrcode(self, request, subscriber_id): qrcode = self.request('subscribers/%s/qrcode' % subscriber_id, result_is_json=False) content_type = qrcode.headers.get('Content-Type') if not (content_type and content_type.startswith('image/')): response = json_loads(qrcode.content) - raise APIError(response['message'], - http_status=response['status'], - err=response['code']) + raise APIError(response['message'], http_status=response['status'], err=response['code']) return HttpResponse(qrcode.content, content_type=content_type) @endpoint(name='subscription', methods=['post'], perm='can_access') @@ -273,7 +283,6 @@ class Okina(BaseResource): return {'data': subscriptions} return {'data': self.request('subscriptions', payload, result_is_list=False)} - @endpoint(name='subscription', pattern='^(?P\d+)/*$', - methods=['get'], perm='can_access') + @endpoint(name='subscription', pattern='^(?P\d+)/*$', methods=['get'], perm='can_access') def get_subscription(self, request, subscription_id): return {'data': self.request('subscriptions/%s' % subscription_id, result_is_list=False)} diff --git a/passerelle/apps/opendatasoft/migrations/0001_initial.py b/passerelle/apps/opendatasoft/migrations/0001_initial.py index eeebc84d..b2466890 100644 --- a/passerelle/apps/opendatasoft/migrations/0001_initial.py +++ b/passerelle/apps/opendatasoft/migrations/0001_initial.py @@ -19,13 +19,37 @@ class Migration(migrations.Migration): migrations.CreateModel( name='OpenDataSoft', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', 
models.TextField(verbose_name='Description')), - ('service_url', models.CharField(help_text='OpenData Web Service URL', max_length=256, verbose_name='Service URL')), - ('api_key', models.CharField(blank=True, help_text='API key used as credentials', max_length=128, verbose_name='API key')), - ('users', models.ManyToManyField(blank=True, related_name='_opendatasoft_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'service_url', + models.CharField( + help_text='OpenData Web Service URL', max_length=256, verbose_name='Service URL' + ), + ), + ( + 'api_key', + models.CharField( + blank=True, + help_text='API key used as credentials', + max_length=128, + verbose_name='API key', + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_opendatasoft_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'OpenDataSoft Web Service', @@ -34,13 +58,35 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Query', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name', models.CharField(max_length=128, verbose_name='Name')), ('slug', models.SlugField(max_length=128, verbose_name='Slug')), ('description', models.TextField(blank=True, verbose_name='Description')), - ('dataset', models.CharField(help_text='dataset to query', max_length=128, verbose_name='Dataset')), - ('text_template', models.TextField(blank=True, help_text="Use Django's template syntax. 
Attributes can be accessed through {{ attributes.name }}", validators=[passerelle.utils.templates.validate_template], verbose_name='Text template')), - ('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='queries', to='opendatasoft.OpenDataSoft', verbose_name='Resource')), + ( + 'dataset', + models.CharField(help_text='dataset to query', max_length=128, verbose_name='Dataset'), + ), + ( + 'text_template', + models.TextField( + blank=True, + help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}", + validators=[passerelle.utils.templates.validate_template], + verbose_name='Text template', + ), + ), + ( + 'resource', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='queries', + to='opendatasoft.OpenDataSoft', + verbose_name='Resource', + ), + ), ], options={ 'verbose_name': 'Query', diff --git a/passerelle/apps/opendatasoft/models.py b/passerelle/apps/opendatasoft/models.py index a3f828c1..1be003b9 100644 --- a/passerelle/apps/opendatasoft/models.py +++ b/passerelle/apps/opendatasoft/models.py @@ -30,12 +30,14 @@ from passerelle.utils.api import endpoint class OpenDataSoft(BaseResource): service_url = models.CharField( _('Site URL'), - max_length=256, blank=False, + max_length=256, + blank=False, help_text=_('URL without ending "api/records/1.0/search/"'), ) api_key = models.CharField( _('API key'), - max_length=128, blank=True, + max_length=128, + blank=True, help_text=_('API key used as credentials'), ) @@ -72,7 +74,8 @@ class OpenDataSoft(BaseResource): 'id': {'description': _('Record identifier')}, 'q': {'description': _('Full text query')}, 'limit': {'description': _('Maximum items')}, - }) + }, + ) def search(self, request, dataset=None, text_template='', id=None, q=None, limit=None, **kwargs): scheme, netloc, path, params, query, fragment = urlparse.urlparse(self.service_url) path = urlparse.urljoin(path, 'api/records/1.0/search/') @@ -108,11 
+111,13 @@ class OpenDataSoft(BaseResource): return {'data': result} - @endpoint(name='q', - description=_('Query'), - pattern=r'^(?P[\w:_-]+)/$', - perm='can_access', - show=False) + @endpoint( + name='q', + description=_('Query'), + pattern=r'^(?P[\w:_-]+)/$', + perm='can_access', + show=False, + ) def q(self, request, query_slug, **kwargs): query = get_object_or_404(Query, resource=self, slug=query_slug) return query.q(request, **kwargs) @@ -123,30 +128,26 @@ class OpenDataSoft(BaseResource): class Query(BaseQuery): resource = models.ForeignKey( - to=OpenDataSoft, - related_name='queries', - verbose_name=_('Resource'), - on_delete=models.CASCADE) + to=OpenDataSoft, related_name='queries', verbose_name=_('Resource'), on_delete=models.CASCADE + ) dataset = models.CharField( _('Dataset'), - max_length=128, blank=False, + max_length=128, + blank=False, help_text=_('dataset to query'), ) text_template = models.TextField( verbose_name=_('Text template'), - help_text=_( - "Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}" - ), + help_text=_("Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}"), validators=[validate_template], - blank=True + blank=True, ) delete_view = 'opendatasoft-query-delete' edit_view = 'opendatasoft-query-edit' def q(self, request, **kwargs): - return self.resource.search( - request, dataset=self.dataset, text_template=self.text_template, **kwargs) + return self.resource.search(request, dataset=self.dataset, text_template=self.text_template, **kwargs) def as_endpoint(self): endpoint = super(Query, self).as_endpoint(path=self.resource.q.endpoint_info.name) diff --git a/passerelle/apps/opendatasoft/urls.py b/passerelle/apps/opendatasoft/urls.py index 1018ba18..cb66560a 100644 --- a/passerelle/apps/opendatasoft/urls.py +++ b/passerelle/apps/opendatasoft/urls.py @@ -19,10 +19,11 @@ from django.conf.urls import url from . 
import views management_urlpatterns = [ - url(r'^(?P[\w,-]+)/query/new/$', - views.QueryNew.as_view(), name='opendatasoft-query-new'), - url(r'^(?P[\w,-]+)/query/(?P\d+)/$', - views.QueryEdit.as_view(), name='opendatasoft-query-edit'), - url(r'^(?P[\w,-]+)/query/(?P\d+)/delete/$', - views.QueryDelete.as_view(), name='opendatasoft-query-delete'), + url(r'^(?P[\w,-]+)/query/new/$', views.QueryNew.as_view(), name='opendatasoft-query-new'), + url(r'^(?P[\w,-]+)/query/(?P\d+)/$', views.QueryEdit.as_view(), name='opendatasoft-query-edit'), + url( + r'^(?P[\w,-]+)/query/(?P\d+)/delete/$', + views.QueryDelete.as_view(), + name='opendatasoft-query-delete', + ), ] diff --git a/passerelle/apps/opengis/migrations/0001_initial.py b/passerelle/apps/opengis/migrations/0001_initial.py index 3062d302..73c2964c 100644 --- a/passerelle/apps/opengis/migrations/0001_initial.py +++ b/passerelle/apps/opengis/migrations/0001_initial.py @@ -14,14 +14,37 @@ class Migration(migrations.Migration): migrations.CreateModel( name='OpenGIS', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('service_root_url', 
models.URLField(max_length=256, verbose_name='Service Root URL')), ('query_layer', models.CharField(max_length=256, verbose_name='Query Layer')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_opengis_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_opengis_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'OpenGIS', diff --git a/passerelle/apps/opengis/migrations/0004_auto_20180219_1613.py b/passerelle/apps/opengis/migrations/0004_auto_20180219_1613.py index 2643bc30..36f161c1 100644 --- a/passerelle/apps/opengis/migrations/0004_auto_20180219_1613.py +++ b/passerelle/apps/opengis/migrations/0004_auto_20180219_1613.py @@ -14,6 +14,16 @@ class Migration(migrations.Migration): migrations.AddField( model_name='opengis', name='projection', - field=models.CharField(choices=[(b'EPSG:2154', 'EPSG:2154 (Lambert-93)'), (b'EPSG:3857', 'EPSG:3857 (WGS 84 / Pseudo-Mercator)'), (b'EPSG:3945', 'EPSG:3945 (CC45)'), (b'EPSG:4326', 'EPSG:4326 (WGS 84)')], default=b'EPSG:3857', max_length=16, verbose_name='GIS projection'), + field=models.CharField( + choices=[ + (b'EPSG:2154', 'EPSG:2154 (Lambert-93)'), + (b'EPSG:3857', 'EPSG:3857 (WGS 84 / Pseudo-Mercator)'), + (b'EPSG:3945', 'EPSG:3945 (CC45)'), + (b'EPSG:4326', 'EPSG:4326 (WGS 84)'), + ], + default=b'EPSG:3857', + max_length=16, + verbose_name='GIS projection', + ), ), ] diff --git a/passerelle/apps/opengis/migrations/0007_auto_20200401_1032.py b/passerelle/apps/opengis/migrations/0007_auto_20200401_1032.py index 65f2f80e..024556a7 100644 --- a/passerelle/apps/opengis/migrations/0007_auto_20200401_1032.py +++ b/passerelle/apps/opengis/migrations/0007_auto_20200401_1032.py @@ -16,7 +16,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Query', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name', models.CharField(max_length=128, verbose_name='Name')), ('slug', models.SlugField(max_length=128, verbose_name='Slug')), ('description', models.TextField(blank=True, verbose_name='Description')), @@ -31,12 +34,27 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='opengis', name='projection', - field=models.CharField(choices=[('EPSG:2154', 'EPSG:2154 (Lambert-93)'), ('EPSG:3857', 'EPSG:3857 (WGS 84 / Pseudo-Mercator)'), ('EPSG:3945', 'EPSG:3945 (CC45)'), ('EPSG:4326', 'EPSG:4326 (WGS 84)')], default='EPSG:3857', max_length=16, verbose_name='GIS projection'), + field=models.CharField( + choices=[ + ('EPSG:2154', 'EPSG:2154 (Lambert-93)'), + ('EPSG:3857', 'EPSG:3857 (WGS 84 / Pseudo-Mercator)'), + ('EPSG:3945', 'EPSG:3945 (CC45)'), + ('EPSG:4326', 'EPSG:4326 (WGS 84)'), + ], + default='EPSG:3857', + max_length=16, + verbose_name='GIS projection', + ), ), migrations.AddField( model_name='query', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='queries', to='opengis.OpenGIS', verbose_name='Resource'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='queries', + to='opengis.OpenGIS', + verbose_name='Resource', + ), ), migrations.AlterUniqueTogether( name='query', diff --git a/passerelle/apps/opengis/migrations/0008_featurecache.py b/passerelle/apps/opengis/migrations/0008_featurecache.py index bbf48c51..ccd45c2c 100644 --- a/passerelle/apps/opengis/migrations/0008_featurecache.py +++ b/passerelle/apps/opengis/migrations/0008_featurecache.py @@ -17,11 +17,22 @@ class Migration(migrations.Migration): migrations.CreateModel( name='FeatureCache', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID'), + ), ('lat', models.FloatField()), ('lon', models.FloatField()), ('data', django.contrib.postgres.fields.jsonb.JSONField(default=dict)), - ('query', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='features', to='opengis.Query', verbose_name='Query')), + ( + 'query', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='features', + to='opengis.Query', + verbose_name='Query', + ), + ), ], ), ] diff --git a/passerelle/apps/opengis/migrations/0009_auto_20200407_1544.py b/passerelle/apps/opengis/migrations/0009_auto_20200407_1544.py index a8e20add..24e1b86d 100644 --- a/passerelle/apps/opengis/migrations/0009_auto_20200407_1544.py +++ b/passerelle/apps/opengis/migrations/0009_auto_20200407_1544.py @@ -21,6 +21,11 @@ class Migration(migrations.Migration): migrations.AddField( model_name='query', name='index_properties', - field=models.CharField(blank=True, help_text='Comma separated list such as property1,property2', max_length=1024, verbose_name='Properties for searching'), + field=models.CharField( + blank=True, + help_text='Comma separated list such as property1,property2', + max_length=1024, + verbose_name='Properties for searching', + ), ), ] diff --git a/passerelle/apps/opengis/migrations/0010_auto_20200504_1402.py b/passerelle/apps/opengis/migrations/0010_auto_20200504_1402.py index b9ccf7cc..a5f9fa00 100644 --- a/passerelle/apps/opengis/migrations/0010_auto_20200504_1402.py +++ b/passerelle/apps/opengis/migrations/0010_auto_20200504_1402.py @@ -25,6 +25,11 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='opengis', name='query_layer', - field=models.CharField(blank=True, help_text='Corresponds to a WMS layer name and/or a WFS feature type.', max_length=256, verbose_name='Query Layer'), + field=models.CharField( + blank=True, + help_text='Corresponds to a WMS layer name and/or a WFS feature type.', + max_length=256, + verbose_name='Query Layer', + ), ), ] 
diff --git a/passerelle/apps/opengis/migrations/0012_query_indexing_template.py b/passerelle/apps/opengis/migrations/0012_query_indexing_template.py index 93fa4cea..91d418b4 100644 --- a/passerelle/apps/opengis/migrations/0012_query_indexing_template.py +++ b/passerelle/apps/opengis/migrations/0012_query_indexing_template.py @@ -8,7 +8,9 @@ from django.db import migrations, models def create_indexing_template(apps, schema_editor): Query = apps.get_model('opengis', 'Query') for query in Query.objects.all(): - query.indexing_template = ' '.join(['{{ %s|default:"" }}' % x.strip() for x in query.index_properties.split(',') if x.strip()]) + query.indexing_template = ' '.join( + ['{{ %s|default:"" }}' % x.strip() for x in query.index_properties.split(',') if x.strip()] + ) query.save() @@ -22,7 +24,11 @@ class Migration(migrations.Migration): migrations.AddField( model_name='query', name='indexing_template', - field=models.TextField(blank=True, help_text='Template used to generate contents used in fulltext search.', verbose_name='Indexing template'), + field=models.TextField( + blank=True, + help_text='Template used to generate contents used in fulltext search.', + verbose_name='Indexing template', + ), ), migrations.RunPython(create_indexing_template, lambda x, y: None), ] diff --git a/passerelle/apps/opengis/models.py b/passerelle/apps/opengis/models.py index d53a5646..83c75fb4 100644 --- a/passerelle/apps/opengis/models.py +++ b/passerelle/apps/opengis/models.py @@ -51,6 +51,7 @@ def build_dict_from_xml(elem): d[attribute_name] = build_dict_from_xml(child) return d + PROJECTIONS = ( ('EPSG:2154', _('EPSG:2154 (Lambert-93)')), ('EPSG:3857', _('EPSG:3857 (WGS 84 / Pseudo-Mercator)')), @@ -69,15 +70,16 @@ class OpenGIS(BaseResource): help_text=_('Corresponds to a WMS layer name and/or a WFS feature type.'), blank=True, ) - projection = models.CharField(_('GIS projection'), choices=PROJECTIONS, - default='EPSG:3857', max_length=16) + projection = models.CharField( + 
_('GIS projection'), choices=PROJECTIONS, default='EPSG:3857', max_length=16 + ) search_radius = models.IntegerField(_('Radius for point search'), default=5) attributes_mapping = ( ('road', ('road', 'road_name', 'street', 'street_name', 'voie', 'nom_voie', 'rue')), ('city', ('city', 'city_name', 'town', 'town_name', 'commune', 'nom_commune', 'ville', 'nom_ville')), ('house_number', ('house_number', 'number', 'numero', 'numero_voie', 'numero_rue')), ('postcode', ('postcode', 'postalCode', 'zipcode', 'codepostal', 'cp', 'code_postal', 'code_post')), - ('country', ('country', 'country_name', 'pays', 'nom_pays')) + ('country', ('country', 'country_name', 'pays', 'nom_pays')), ) class Meta: @@ -118,9 +120,12 @@ class OpenGIS(BaseResource): response = self.get_capabilities('wfs', self.wfs_service_url) element = ET.fromstring(response.content) ns = {'ows': 'http://www.opengis.net/ows/1.1'} - formats = element.findall('.//ows:Operation[@name="GetFeature"]/' - 'ows:Parameter[@name="outputFormat"]/' - 'ows:AllowedValues/ows:Value', ns) + formats = element.findall( + './/ows:Operation[@name="GetFeature"]/' + 'ows:Parameter[@name="outputFormat"]/' + 'ows:AllowedValues/ows:Value', + ns, + ) for output_format in formats: if 'json' in output_format.text.lower(): cache.set(cache_key, output_format.text, 3600) @@ -138,23 +143,16 @@ class OpenGIS(BaseResource): def check_status(self): if self.wms_service_url: response = self.requests.get( - self.wms_service_url, - params={ - 'service': 'WMS', - 'request': 'GetCapabilities' - }) + self.wms_service_url, params={'service': 'WMS', 'request': 'GetCapabilities'} + ) response.raise_for_status() if self.wfs_service_url: response = self.requests.get( - self.wfs_service_url, - params={ - 'service': 'WFS', - 'request': 'GetCapabilities' - }) + self.wfs_service_url, params={'service': 'WFS', 'request': 'GetCapabilities'} + ) response.raise_for_status() - def build_get_features_params(self, typename=None, property_name=None, cql_filter=None, - 
xml_filter=None): + def build_get_features_params(self, typename=None, property_name=None, cql_filter=None, xml_filter=None): params = { 'version': self.get_wfs_service_version(), 'service': 'WFS', @@ -170,41 +168,46 @@ class OpenGIS(BaseResource): params['filter'] = xml_filter return params - @endpoint(perm='can_access', description=_('Get features'), - parameters={ - 'type_names': { - 'description': _('Type of feature to query. Defaults to globally defined ' - 'query layer'), - 'example_value': 'feature' - }, - 'property_name': { - 'description': _('Property to list'), - 'example_value': 'nom_commune' - }, - 'cql_filter': { - 'description': _('CQL filter applied to the query'), - 'example_value': 'commune=\'Paris\'' - }, - 'filter_property_name': { - 'description': _('Property by which to filter'), - 'example_value': 'voie' - }, - 'q': { - 'description': _('Filter value'), - 'example_value': 'rue du chateau' - }, - 'case_insensitive': { - 'description': _('Enables case-insensitive search'), - }, - 'xml_filter': { - 'description': _('Filter applied to the query'), - 'example_value': 'typeparking' - '' - } - }) - def features(self, request, property_name, type_names=None, cql_filter=None, - filter_property_name=None, q=None, case_insensitive=False, - xml_filter=None, **kwargs): + @endpoint( + perm='can_access', + description=_('Get features'), + parameters={ + 'type_names': { + 'description': _('Type of feature to query. 
Defaults to globally defined ' 'query layer'), + 'example_value': 'feature', + }, + 'property_name': {'description': _('Property to list'), 'example_value': 'nom_commune'}, + 'cql_filter': { + 'description': _('CQL filter applied to the query'), + 'example_value': 'commune=\'Paris\'', + }, + 'filter_property_name': { + 'description': _('Property by which to filter'), + 'example_value': 'voie', + }, + 'q': {'description': _('Filter value'), 'example_value': 'rue du chateau'}, + 'case_insensitive': { + 'description': _('Enables case-insensitive search'), + }, + 'xml_filter': { + 'description': _('Filter applied to the query'), + 'example_value': 'typeparking' + '', + }, + }, + ) + def features( + self, + request, + property_name, + type_names=None, + cql_filter=None, + filter_property_name=None, + q=None, + case_insensitive=False, + xml_filter=None, + **kwargs, + ): if cql_filter: if filter_property_name and q: if 'case-insensitive' in kwargs or case_insensitive: @@ -220,8 +223,9 @@ class OpenGIS(BaseResource): except ValueError: self.handle_opengis_error(response) # if handle_opengis_error did not raise an error, we raise a generic one - raise APIError(u'OpenGIS Error: unparsable error', - data={'content': repr(response.content[:1024])}) + raise APIError( + u'OpenGIS Error: unparsable error', data={'content': repr(response.content[:1024])} + ) for feature in response['features']: feature['text'] = feature['properties'].get(property_name) data.append(feature) @@ -243,8 +247,7 @@ class OpenGIS(BaseResource): return None content = exception_text.text content = html.unescape(content) - raise APIError(u'OpenGIS Error: %s' % exception_code or 'unknown code', - data={'text': content}) + raise APIError(u'OpenGIS Error: %s' % exception_code or 'unknown code', data={'text': content}) def convert_coordinates(self, lon, lat, reverse=False): lon, lat = float(lon), float(lat) @@ -267,13 +270,15 @@ class OpenGIS(BaseResource): x2, y2 = pyproj.transform(wgs84, target_projection, 
lon2, lat2) return '%.6f,%.6f,%.6f,%.6f' % (x1, y1, x2, y2) - @endpoint(perm='can_access', - description=_('Get feature info'), - parameters={ - 'lat': {'description': _('Latitude'), 'example_value': '45.79689'}, - 'lon': {'description': _('Longitude'), 'example_value': '4.78414'}, - 'query_layer': {'description': _('Defaults to globally defined query layer')}, - }) + @endpoint( + perm='can_access', + description=_('Get feature info'), + parameters={ + 'lat': {'description': _('Latitude'), 'example_value': '45.79689'}, + 'lon': {'description': _('Longitude'), 'example_value': '4.78414'}, + 'query_layer': {'description': _('Defaults to globally defined query layer')}, + }, + ) def feature_info(self, request, lat, lon, query_layer=None): try: lat, lon = float(lat), float(lon) @@ -286,8 +291,10 @@ class OpenGIS(BaseResource): 'request': 'GetFeatureInfo', 'info_format': 'application/vnd.ogc.gml', 'styles': '', - 'i': '24', 'J': '24', # pixel in the middle of - 'height': '50', 'WIDTH': '50', # a 50x50 square + 'i': '24', + 'J': '24', # pixel in the middle of + 'height': '50', + 'WIDTH': '50', # a 50x50 square 'crs': self.projection, 'layers': query_layer or self.query_layer, 'query_layers': query_layer or self.query_layer, @@ -298,15 +305,18 @@ class OpenGIS(BaseResource): return {'err': 0, 'data': build_dict_from_xml(element)} # https://carton.entrouvert.org/hydda-tiles/16/33650/23378.pn - @endpoint(name='tile', pattern=r'^(?P<zoom>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+).png', - description=_('Get Map Tile'), - example_pattern='{zoom}/{tile_x}/{tile_y}.png', - parameters={ - 'zoom': {'description': _('Zoom Level'), 'example_value': '16'}, - 'tile_x': {'description': _('X Coordinate'), 'example_value': '33650'}, - 'tile_y': {'description': _('Y Coordinate'), 'example_value': '23378'}, - 'query_layer': {'description': _('Defaults to globally defined query layer')}, - }) + @endpoint( + name='tile', + pattern=r'^(?P<zoom>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+).png', + description=_('Get Map Tile'), + 
example_pattern='{zoom}/{tile_x}/{tile_y}.png', + parameters={ + 'zoom': {'description': _('Zoom Level'), 'example_value': '16'}, + 'tile_x': {'description': _('X Coordinate'), 'example_value': '33650'}, + 'tile_y': {'description': _('Y Coordinate'), 'example_value': '23378'}, + 'query_layer': {'description': _('Defaults to globally defined query layer')}, + }, + ) def tile(self, request, zoom, tile_x, tile_y, query_layer=None): zoom = int(zoom) @@ -314,9 +324,9 @@ class OpenGIS(BaseResource): tile_y = int(tile_y) # lower left - ll_lon, ll_lat = num2deg(tile_x, tile_y+1, zoom) + ll_lon, ll_lat = num2deg(tile_x, tile_y + 1, zoom) # upper right - ur_lon, ur_lat = num2deg(tile_x+1, tile_y, zoom) + ur_lon, ur_lat = num2deg(tile_x + 1, tile_y, zoom) bbox = self.get_bbox(ll_lon, ll_lat, ur_lon, ur_lat) @@ -333,22 +343,21 @@ class OpenGIS(BaseResource): 'crs': self.projection, 'bbox': bbox, } - response = self.requests.get( - self.wms_service_url, - params=params, - cache_duration=300) + response = self.requests.get(self.wms_service_url, params=params, cache_duration=300) return HttpResponse(response.content, content_type='image/png') - @endpoint(perm='can_access', description=_('Get feature info'), - parameters={ - 'lat': {'description': _('Latitude'), 'example_value': '45.79689'}, - 'lon': {'description': _('Longitude'), 'example_value': '4.78414'}, - 'type_names': { - 'description': _('Type of feature to query. Defaults to globally defined ' - 'query layer'), - 'example_value': 'feature' - }, - }) + @endpoint( + perm='can_access', + description=_('Get feature info'), + parameters={ + 'lat': {'description': _('Latitude'), 'example_value': '45.79689'}, + 'lon': {'description': _('Longitude'), 'example_value': '4.78414'}, + 'type_names': { + 'description': _('Type of feature to query. 
Defaults to globally defined ' 'query layer'), + 'example_value': 'feature', + }, + }, + ) def reverse(self, request, lat, lon, type_names=None, **kwargs): lon, lat = self.convert_coordinates(lon, lat) @@ -389,32 +398,38 @@ class OpenGIS(BaseResource): return result raise APIError('Unable to geocode') - @endpoint(name='query', - description=_('Query'), - pattern=r'^(?P<query_slug>[\w:_-]+)/$', - perm='can_access', - parameters={ - 'bbox': { - 'description': _('Only include results inside bounding box. Expected coordinates ' - 'format is lonmin,latmin,lonmax,latmax'), - }, - 'circle': { - 'description': _('Only include results inside circle. Expected coordinates ' - 'format is center_longitude,center_latitude,radius ("radius" ' - 'being a distance in meters)'), - }, - 'q': { - 'description': _('Text search for specified properties'), - }, - 'property': { - 'description': _( - 'Filter on any GeoJSON property value. If a feature has an "id" property, ' - 'format will be property:id=value.' - ), - 'optional': True, - } - }, - show=False) + @endpoint( + name='query', + description=_('Query'), + pattern=r'^(?P<query_slug>[\w:_-]+)/$', + perm='can_access', + parameters={ + 'bbox': { + 'description': _( + 'Only include results inside bounding box. Expected coordinates ' + 'format is lonmin,latmin,lonmax,latmax' + ), + }, + 'circle': { + 'description': _( + 'Only include results inside circle. Expected coordinates ' + 'format is center_longitude,center_latitude,radius ("radius" ' + 'being a distance in meters)' + ), + }, + 'q': { + 'description': _('Text search for specified properties'), + }, + 'property': { + 'description': _( + 'Filter on any GeoJSON property value. If a feature has an "id" property, ' + 'format will be property:id=value.' 
+ ), + 'optional': True, + }, + }, + show=False, + ) def query(self, request, query_slug, bbox=None, circle=None, q=None, **kwargs): if bbox and circle: raise APIError('bbox and circle parameters are mutually exclusive') @@ -465,17 +480,16 @@ class OpenGIS(BaseResource): class Query(BaseQuery): resource = models.ForeignKey( - to=OpenGIS, - on_delete=models.CASCADE, - related_name='queries', - verbose_name=_('Resource')) + to=OpenGIS, on_delete=models.CASCADE, related_name='queries', verbose_name=_('Resource') + ) typename = models.CharField(_('Feature type'), max_length=256) filter_expression = models.TextField(_('XML filter'), blank=True) indexing_template = models.TextField( verbose_name=_('Indexing template'), help_text=_('Template used to generate contents used in fulltext search.'), - blank=True) + blank=True, + ) delete_view = 'opengis-query-delete' edit_view = 'opengis-query-edit' @@ -492,8 +506,9 @@ class Query(BaseQuery): def q(self, request, bbox, q, circle, **kwargs): features = self.features.all() if not features.exists(): - raise APIError('No data. (maybe not synchronized yet? retry in a few minutes.)', - extra_dict={'features': []}) + raise APIError( + 'No data. (maybe not synchronized yet? retry in a few minutes.)', extra_dict={'features': []} + ) filters = {} for lookup, value in kwargs.items(): @@ -510,29 +525,27 @@ class Query(BaseQuery): try: lonmin, latmin, lonmax, latmax = (float(x) for x in bbox.split(',')) except (ValueError, AttributeError): - raise APIError('Invalid bbox parameter, it must be a comma separated list of ' - 'floating point numbers.') - features = features.filter(lon__gte=lonmin, lon__lte=lonmax, lat__gte=latmin, - lat__lte=latmax) + raise APIError( + 'Invalid bbox parameter, it must be a comma separated list of ' 'floating point numbers.' 
+ ) + features = features.filter(lon__gte=lonmin, lon__lte=lonmax, lat__gte=latmin, lat__lte=latmax) if circle: try: center_lon, center_lat, radius = (float(x) for x in circle.split(',')) except (ValueError, AttributeError): - raise APIError('Invalid circle parameter, it must be a comma separated list of ' - 'floating point numbers.') + raise APIError( + 'Invalid circle parameter, it must be a comma separated list of ' + 'floating point numbers.' + ) coords = self.get_bbox_containing_circle(center_lon, center_lat, radius) lonmin, latmin, lonmax, latmax = coords - features = features.filter(lon__gte=lonmin, lon__lte=lonmax, lat__gte=latmin, - lat__lte=latmax) + features = features.filter(lon__gte=lonmin, lon__lte=lonmax, lat__gte=latmin, lat__lte=latmax) if q: features = features.filter(text__search=simplify(q)) - data = { - 'type': 'FeatureCollection', - 'name': self.typename - } + data = {'type': 'FeatureCollection', 'name': self.typename} if circle: results = [] for feature in features: @@ -607,10 +620,8 @@ class Query(BaseQuery): class FeatureCache(models.Model): query = models.ForeignKey( - to=Query, - on_delete=models.CASCADE, - related_name='features', - verbose_name=_('Query')) + to=Query, on_delete=models.CASCADE, related_name='features', verbose_name=_('Query') + ) lat = models.FloatField() lon = models.FloatField() text = models.CharField(max_length=2048) diff --git a/passerelle/apps/opengis/urls.py b/passerelle/apps/opengis/urls.py index 3e68b735..deb4ee7e 100644 --- a/passerelle/apps/opengis/urls.py +++ b/passerelle/apps/opengis/urls.py @@ -19,10 +19,11 @@ from django.conf.urls import url from . 
import views management_urlpatterns = [ - url(r'^(?P<slug>[\w,-]+)/query/new/$', - views.QueryNew.as_view(), name='opengis-query-new'), - url(r'^(?P<slug>[\w,-]+)/query/(?P<pk>\d+)/$', - views.QueryEdit.as_view(), name='opengis-query-edit'), - url(r'^(?P<slug>[\w,-]+)/query/(?P<pk>\d+)/delete/$', - views.QueryDelete.as_view(), name='opengis-query-delete'), + url(r'^(?P<slug>[\w,-]+)/query/new/$', views.QueryNew.as_view(), name='opengis-query-new'), + url(r'^(?P<slug>[\w,-]+)/query/(?P<pk>\d+)/$', views.QueryEdit.as_view(), name='opengis-query-edit'), + url( + r'^(?P<slug>[\w,-]+)/query/(?P<pk>\d+)/delete/$', + views.QueryDelete.as_view(), + name='opengis-query-delete', + ), ] diff --git a/passerelle/apps/orange/migrations/0001_initial.py b/passerelle/apps/orange/migrations/0001_initial.py index 3dced99f..a68c03a4 100644 --- a/passerelle/apps/orange/migrations/0001_initial.py +++ b/passerelle/apps/orange/migrations/0001_initial.py @@ -14,12 +14,32 @@ class Migration(migrations.Migration): migrations.CreateModel( name='OrangeSMSGateway', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('keystore', models.FileField(help_text='Certificate and private key in PEM format', upload_to=b'orange', null=True, verbose_name='Keystore', blank=True)), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_orangesmsgateway_users_+', related_query_name='+', blank=True)), + ( + 'keystore', + models.FileField( + help_text='Certificate and private key in PEM format', + upload_to=b'orange', + null=True, + verbose_name='Keystore', + blank=True, + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_orangesmsgateway_users_+', 
related_query_name='+', + blank=True, + ), + ), ], options={ 'db_table': 'sms_orange', diff --git a/passerelle/apps/orange/migrations/0002_orangesmsgateway_log_level.py b/passerelle/apps/orange/migrations/0002_orangesmsgateway_log_level.py index e18d36b6..34bbc283 100644 --- a/passerelle/apps/orange/migrations/0002_orangesmsgateway_log_level.py +++ b/passerelle/apps/orange/migrations/0002_orangesmsgateway_log_level.py @@ -14,7 +14,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='orangesmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/orange/migrations/0003_auto_20160316_0910.py b/passerelle/apps/orange/migrations/0003_auto_20160316_0910.py index 0d216801..d18b723e 100644 --- a/passerelle/apps/orange/migrations/0003_auto_20160316_0910.py +++ b/passerelle/apps/orange/migrations/0003_auto_20160316_0910.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='orangesmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/orange/migrations/0004_auto_20160407_0456.py b/passerelle/apps/orange/migrations/0004_auto_20160407_0456.py index 250ed2b6..a3f33837 100644 --- a/passerelle/apps/orange/migrations/0004_auto_20160407_0456.py +++ b/passerelle/apps/orange/migrations/0004_auto_20160407_0456.py @@ -14,7 +14,19 @@ class 
Migration(migrations.Migration): migrations.AlterField( model_name='orangesmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/orange/migrations/0007_auto_20200310_1539.py b/passerelle/apps/orange/migrations/0007_auto_20200310_1539.py index 76fad4ab..24668ccf 100644 --- a/passerelle/apps/orange/migrations/0007_auto_20200310_1539.py +++ b/passerelle/apps/orange/migrations/0007_auto_20200310_1539.py @@ -20,6 +20,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='orangesmsgateway', name='keystore', - field=models.FileField(blank=True, help_text='Certificate and private key in PEM format', null=True, upload_to='orange', verbose_name='Keystore'), + field=models.FileField( + blank=True, + help_text='Certificate and private key in PEM format', + null=True, + upload_to='orange', + verbose_name='Keystore', + ), ), ] diff --git a/passerelle/apps/orange/models.py b/passerelle/apps/orange/models.py index ec1108b5..24fe1db2 100644 --- a/passerelle/apps/orange/models.py +++ b/passerelle/apps/orange/models.py @@ -59,8 +59,7 @@ class OrangeSMSGateway(SMSResource): params = {'username': self.username, 'password': self.password} response = self.requests.post(URL_TOKEN, data=params, headers=headers) if response.status_code != 200: - raise APIError('Bad username or password: %s, %s' % ( - response.status_code, response.text)) + raise APIError('Bad username or password: %s, %s' % (response.status_code, response.text)) 
response_json = get_json(response) if 'access_token' not in response_json: raise OrangeError('Orange do not return access token') @@ -70,8 +69,7 @@ class OrangeSMSGateway(SMSResource): headers = {'authorization': 'Bearer %s' % access_token} response = self.requests.get(URL_GROUPS, headers=headers) if response.status_code != 200: - raise APIError('Bad token: %s, %s' % ( - response.status_code, response.text)) + raise APIError('Bad token: %s, %s' % (response.status_code, response.text)) response_json = get_json(response) group_id = None for group in response_json: @@ -90,16 +88,11 @@ class OrangeSMSGateway(SMSResource): payload = { 'name': 'Send a SMS from passerelle', 'msisdns': destinations, - 'smsParam': { - 'encoding': 'GSM7', - 'body': message - } + 'smsParam': {'encoding': 'GSM7', 'body': message}, } - response = self.requests.post( - URL_DIFFUSION % group_id, json=payload, headers=headers) + response = self.requests.post(URL_DIFFUSION % group_id, json=payload, headers=headers) if response.status_code != 201: - raise OrangeError('Orange fails to send SMS: %s, %s' % ( - response.status_code, response.text)) + raise OrangeError('Orange fails to send SMS: %s, %s' % (response.status_code, response.text)) return get_json(response) def send_msg(self, text, sender, destinations, **kwargs): diff --git a/passerelle/apps/ovh/admin.py b/passerelle/apps/ovh/admin.py index 6c86fbee..aa144d48 100644 --- a/passerelle/apps/ovh/admin.py +++ b/passerelle/apps/ovh/admin.py @@ -2,10 +2,20 @@ from django.contrib import admin from .models import OVHSMSGateway + class OVHSMSGatewayAdmin(admin.ModelAdmin): prepopulated_fields = {'slug': ('title',)} - list_display = ['title', 'slug', 'description', 'username', 'password', - 'msg_class', 'credit_threshold_alert', 'credit_left', - 'default_country_code'] + list_display = [ + 'title', + 'slug', + 'description', + 'username', + 'password', + 'msg_class', + 'credit_threshold_alert', + 'credit_left', + 'default_country_code', + ] + 
admin.site.register(OVHSMSGateway, OVHSMSGatewayAdmin) diff --git a/passerelle/apps/ovh/migrations/0001_initial.py b/passerelle/apps/ovh/migrations/0001_initial.py index f9d7848c..2768677f 100644 --- a/passerelle/apps/ovh/migrations/0001_initial.py +++ b/passerelle/apps/ovh/migrations/0001_initial.py @@ -14,18 +14,53 @@ class Migration(migrations.Migration): migrations.CreateModel( name='OVHSMSGateway', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), ('account', models.CharField(max_length=64, verbose_name='Account')), ('username', models.CharField(max_length=64, verbose_name='Username')), ('password', models.CharField(max_length=64, verbose_name='Password')), - ('msg_class', models.IntegerField(choices=[(0, 'Message are directly shown to users on phone screen at reception. The message is never stored, neither in the phone memory nor in the SIM card. It is deleted as soon as the user validate the display.'), (1, 'Messages are stored in the phone memory, or in the SIM card if the memory is full. '), (2, 'Messages are stored in the SIM card.'), (3, 'Messages are stored in external storage like a PDA or a PC.')], default=1, verbose_name='Message class')), - ('credit_threshold_alert', models.PositiveIntegerField(default=100, verbose_name='Credit alert threshold')), - ('default_country_code', models.CharField(default='33', max_length=3, verbose_name='Default country code')), + ( + 'msg_class', + models.IntegerField( + choices=[ + ( + 0, + 'Message are directly shown to users on phone screen at reception. The message is never stored, neither in the phone memory nor in the SIM card. 
It is deleted as soon as the user validate the display.', + ), + ( + 1, + 'Messages are stored in the phone memory, or in the SIM card if the memory is full. ', + ), + (2, 'Messages are stored in the SIM card.'), + (3, 'Messages are stored in external storage like a PDA or a PC.'), + ], + default=1, + verbose_name='Message class', + ), + ), + ( + 'credit_threshold_alert', + models.PositiveIntegerField(default=100, verbose_name='Credit alert threshold'), + ), + ( + 'default_country_code', + models.CharField(default='33', max_length=3, verbose_name='Default country code'), + ), ('credit_left', models.PositiveIntegerField(default=0, verbose_name='Credit left')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_ovhsmsgateway_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_ovhsmsgateway_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'db_table': 'sms_ovh', diff --git a/passerelle/apps/ovh/migrations/0002_ovhsmsgateway_log_level.py b/passerelle/apps/ovh/migrations/0002_ovhsmsgateway_log_level.py index c4637dec..939359dc 100644 --- a/passerelle/apps/ovh/migrations/0002_ovhsmsgateway_log_level.py +++ b/passerelle/apps/ovh/migrations/0002_ovhsmsgateway_log_level.py @@ -14,7 +14,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='ovhsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/ovh/migrations/0003_auto_20160316_0910.py b/passerelle/apps/ovh/migrations/0003_auto_20160316_0910.py index b69da707..b3962583 100644 --- 
a/passerelle/apps/ovh/migrations/0003_auto_20160316_0910.py +++ b/passerelle/apps/ovh/migrations/0003_auto_20160316_0910.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='ovhsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/ovh/migrations/0004_auto_20160407_0456.py b/passerelle/apps/ovh/migrations/0004_auto_20160407_0456.py index f54fe82b..396d74c0 100644 --- a/passerelle/apps/ovh/migrations/0004_auto_20160407_0456.py +++ b/passerelle/apps/ovh/migrations/0004_auto_20160407_0456.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='ovhsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/ovh/migrations/0009_auto_20200730_1047.py b/passerelle/apps/ovh/migrations/0009_auto_20200730_1047.py index a3be3ad4..c6b1c970 100644 --- a/passerelle/apps/ovh/migrations/0009_auto_20200730_1047.py +++ b/passerelle/apps/ovh/migrations/0009_auto_20200730_1047.py @@ -20,26 +20,45 @@ class Migration(migrations.Migration): migrations.AddField( model_name='ovhsmsgateway', name='application_secret', - field=models.CharField(blank=True, 
help_text='Obtained at the same time as "Application key".', max_length=32, verbose_name='Application secret'), + field=models.CharField( + blank=True, + help_text='Obtained at the same time as "Application key".', + max_length=32, + verbose_name='Application secret', + ), ), migrations.AddField( model_name='ovhsmsgateway', name='consumer_key', - field=models.CharField(blank=True, help_text='Automatically obtained from OVH, should not be filled manually.', max_length=32, verbose_name='Consumer key'), + field=models.CharField( + blank=True, + help_text='Automatically obtained from OVH, should not be filled manually.', + max_length=32, + verbose_name='Consumer key', + ), ), migrations.AlterField( model_name='ovhsmsgateway', name='account', - field=models.CharField(help_text='Account identifier, such as sms-XXXXXX-1.', max_length=64, verbose_name='Account'), + field=models.CharField( + help_text='Account identifier, such as sms-XXXXXX-1.', max_length=64, verbose_name='Account' + ), ), migrations.AlterField( model_name='ovhsmsgateway', name='password', - field=models.CharField(blank=True, help_text='Password for legacy API. This field is obsolete once keys and secret fields below are filled.', max_length=64, verbose_name='Password (deprecated)'), + field=models.CharField( + blank=True, + help_text='Password for legacy API. 
This field is obsolete once keys and secret fields below are filled.', + max_length=64, + verbose_name='Password (deprecated)', + ), ), migrations.AlterField( model_name='ovhsmsgateway', name='username', - field=models.CharField(help_text='API user created on the SMS account.', max_length=64, verbose_name='Username'), + field=models.CharField( + help_text='API user created on the SMS account.', max_length=64, verbose_name='Username' + ), ), ] diff --git a/passerelle/apps/ovh/migrations/0011_auto_20201026_1424.py b/passerelle/apps/ovh/migrations/0011_auto_20201026_1424.py index b5ac9bbc..6b146458 100644 --- a/passerelle/apps/ovh/migrations/0011_auto_20201026_1424.py +++ b/passerelle/apps/ovh/migrations/0011_auto_20201026_1424.py @@ -16,7 +16,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='ovhsmsgateway', name='alert_emails', - field=django.contrib.postgres.fields.ArrayField(base_field=models.EmailField(blank=True, max_length=254), blank=True, null=True, size=None, verbose_name='Email addresses to send credit alerts to'), + field=django.contrib.postgres.fields.ArrayField( + base_field=models.EmailField(blank=True, max_length=254), + blank=True, + null=True, + size=None, + verbose_name='Email addresses to send credit alerts to', + ), ), migrations.AddField( model_name='ovhsmsgateway', @@ -31,11 +37,20 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='ovhsmsgateway', name='password', - field=models.CharField(blank=True, help_text='Password for legacy API. This field is obsolete once keys and secret fields above are filled.', max_length=64, verbose_name='Password (deprecated)'), + field=models.CharField( + blank=True, + help_text='Password for legacy API. 
This field is obsolete once keys and secret fields above are filled.', + max_length=64, + verbose_name='Password (deprecated)', + ), ), migrations.AlterField( model_name='ovhsmsgateway', name='username', - field=models.CharField(help_text='API user created on the SMS account. This field is obsolete once keys and secret fields above are filled.', max_length=64, verbose_name='Username (deprecated)'), + field=models.CharField( + help_text='API user created on the SMS account. This field is obsolete once keys and secret fields above are filled.', + max_length=64, + verbose_name='Username (deprecated)', + ), ), ] diff --git a/passerelle/apps/ovh/migrations/0012_auto_20201027_1121.py b/passerelle/apps/ovh/migrations/0012_auto_20201027_1121.py index c9f53457..62c6b077 100644 --- a/passerelle/apps/ovh/migrations/0012_auto_20201027_1121.py +++ b/passerelle/apps/ovh/migrations/0012_auto_20201027_1121.py @@ -20,6 +20,11 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='ovhsmsgateway', name='username', - field=models.CharField(blank=True, help_text='API user created on the SMS account. This field is obsolete once keys and secret fields above are filled.', max_length=64, verbose_name='Username (deprecated)'), + field=models.CharField( + blank=True, + help_text='API user created on the SMS account. 
This field is obsolete once keys and secret fields above are filled.', + max_length=64, + verbose_name='Username (deprecated)', + ), ), ] diff --git a/passerelle/apps/ovh/models.py b/passerelle/apps/ovh/models.py index d55a0f61..c28e3f17 100644 --- a/passerelle/apps/ovh/models.py +++ b/passerelle/apps/ovh/models.py @@ -19,20 +19,25 @@ from passerelle.utils.jsonresponse import APIError class OVHSMSGateway(SMSResource): - documentation_url = 'https://doc-publik.entrouvert.com/admin-fonctionnel/les-tutos/configuration-envoi-sms/' + documentation_url = ( + 'https://doc-publik.entrouvert.com/admin-fonctionnel/les-tutos/configuration-envoi-sms/' + ) hide_description_fields = ['account', 'credit_left'] API_URL = 'https://eu.api.ovh.com/1.0/sms/%(serviceName)s/' URL = 'https://www.ovh.com/cgi-bin/sms/http2sms.cgi' MESSAGES_CLASSES = ( - (0, _('Message are directly shown to users on phone screen ' - 'at reception. The message is never stored, neither in the ' - 'phone memory nor in the SIM card. It is deleted as ' - 'soon as the user validate the display.')), - (1, _('Messages are stored in the phone memory, or in the ' - 'SIM card if the memory is full. ')), + ( + 0, + _( + 'Message are directly shown to users on phone screen ' + 'at reception. The message is never stored, neither in the ' + 'phone memory nor in the SIM card. It is deleted as ' + 'soon as the user validate the display.' + ), + ), + (1, _('Messages are stored in the phone memory, or in the ' 'SIM card if the memory is full. ')), (2, _('Messages are stored in the SIM card.')), - (3, _('Messages are stored in external storage like a PDA or ' - 'a PC.')), + (3, _('Messages are stored in external storage like a PDA or ' 'a PC.')), ) NEW_MESSAGES_CLASSES = ['flash', 'phoneDisplay', 'sim', 'toolkit'] @@ -62,8 +67,10 @@ class OVHSMSGateway(SMSResource): verbose_name=_('Username (deprecated)'), max_length=64, blank=True, - help_text=_('API user created on the SMS account. 
This field is obsolete once keys and secret ' - 'fields above are filled.'), + help_text=_( + 'API user created on the SMS account. This field is obsolete once keys and secret ' + 'fields above are filled.' + ), ) password = models.CharField( verbose_name=_('Password (deprecated)'), @@ -73,10 +80,10 @@ class OVHSMSGateway(SMSResource): 'Password for legacy API. This field is obsolete once keys and secret fields above are filled.' ), ) - msg_class = models.IntegerField(choices=MESSAGES_CLASSES, default=1, - verbose_name=_('Message class')) - credit_threshold_alert = models.PositiveIntegerField(verbose_name=_('Credit alert threshold'), - default=500) + msg_class = models.IntegerField(choices=MESSAGES_CLASSES, default=1, verbose_name=_('Message class')) + credit_threshold_alert = models.PositiveIntegerField( + verbose_name=_('Credit alert threshold'), default=500 + ) credit_left = models.PositiveIntegerField(verbose_name=_('Credit left'), default=0, editable=False) alert_emails = ArrayField( models.EmailField(blank=True), @@ -98,7 +105,7 @@ class OVHSMSGateway(SMSResource): 'result': { 'err': 1, 'err_desc': 'OVH error: bad JSON response', - } + }, }, { 'response': { @@ -110,19 +117,13 @@ class OVHSMSGateway(SMSResource): 'err': 0, 'data': { 'credit_left': 47.0, - 'ovh_result': { - 'SmsIds': [1234], - 'creditLeft': 47, - 'status': 100 - }, + 'ovh_result': {'SmsIds': [1234], 'creditLeft': 47, 'status': 100}, 'sms_ids': [1234], 'warning': 'credit level too low for ovhsmsgateway: 47.0 (threshold 100)', - } - } - } - + }, + }, + }, ], - } class Meta: @@ -263,16 +264,18 @@ class OVHSMSGateway(SMSResource): raise APIError('OVH error: bad JSON response') else: if not isinstance(result, dict): - raise APIError('OVH error: bad JSON response %r, it should be a dictionnary' % - result) + raise APIError('OVH error: bad JSON response %r, it should be a dictionnary' % result) if 100 <= result['status'] < 200: ret = {} credit_left = float(result['creditLeft']) # update credit left 
OVHSMSGateway.objects.filter(id=self.id).update(credit_left=credit_left) if credit_left < self.credit_threshold_alert: - ret['warning'] = ('credit level too low for %s: %s (threshold %s)' % - (self.slug, credit_left, self.credit_threshold_alert)) + ret['warning'] = 'credit level too low for %s: %s (threshold %s)' % ( + self.slug, + credit_left, + self.credit_threshold_alert, + ) ret['credit_left'] = credit_left ret['ovh_result'] = result ret['sms_ids'] = result.get('SmsIds', []) diff --git a/passerelle/apps/ovh/urls.py b/passerelle/apps/ovh/urls.py index 5d800b2b..b4ebb954 100644 --- a/passerelle/apps/ovh/urls.py +++ b/passerelle/apps/ovh/urls.py @@ -3,8 +3,10 @@ from django.conf.urls import url from . import views management_urlpatterns = [ - url(r'^(?P[\w,-]+)/request_token/$', - views.RequestTokenView.as_view(), name='ovh-request-token'), - url(r'^(?P[\w,-]+)/confirm_token/(?P[a-z0-9-]+)/$', - views.ConfirmTokenView.as_view(), name='ovh-confirm-token'), + url(r'^(?P[\w,-]+)/request_token/$', views.RequestTokenView.as_view(), name='ovh-request-token'), + url( + r'^(?P[\w,-]+)/confirm_token/(?P[a-z0-9-]+)/$', + views.ConfirmTokenView.as_view(), + name='ovh-confirm-token', + ), ] diff --git a/passerelle/apps/oxyd/admin.py b/passerelle/apps/oxyd/admin.py index 1ee33106..0fe2cc30 100644 --- a/passerelle/apps/oxyd/admin.py +++ b/passerelle/apps/oxyd/admin.py @@ -2,9 +2,10 @@ from django.contrib import admin from .models import OxydSMSGateway + class OxydSMSGatewayAdmin(admin.ModelAdmin): prepopulated_fields = {'slug': ('title',)} - list_display = ['title', 'slug', 'description', 'username', 'password', - 'default_country_code'] + list_display = ['title', 'slug', 'description', 'username', 'password', 'default_country_code'] + admin.site.register(OxydSMSGateway, OxydSMSGatewayAdmin) diff --git a/passerelle/apps/oxyd/migrations/0001_initial.py b/passerelle/apps/oxyd/migrations/0001_initial.py index 8f031504..4995ed70 100644 --- 
a/passerelle/apps/oxyd/migrations/0001_initial.py +++ b/passerelle/apps/oxyd/migrations/0001_initial.py @@ -14,14 +14,28 @@ class Migration(migrations.Migration): migrations.CreateModel( name='OxydSMSGateway', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), ('username', models.CharField(max_length=64, verbose_name='Username')), ('password', models.CharField(max_length=64, verbose_name='Password')), - ('default_country_code', models.CharField(default='33', max_length=3, verbose_name='Default country code')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_oxydsmsgateway_users_+', related_query_name='+', blank=True)), + ( + 'default_country_code', + models.CharField(default='33', max_length=3, verbose_name='Default country code'), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_oxydsmsgateway_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'db_table': 'sms_oxyd', diff --git a/passerelle/apps/oxyd/migrations/0002_oxydsmsgateway_log_level.py b/passerelle/apps/oxyd/migrations/0002_oxydsmsgateway_log_level.py index 678d407e..8872889c 100644 --- a/passerelle/apps/oxyd/migrations/0002_oxydsmsgateway_log_level.py +++ b/passerelle/apps/oxyd/migrations/0002_oxydsmsgateway_log_level.py @@ -14,7 +14,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='oxydsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + 
verbose_name='Debug Enabled', + blank=True, + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/oxyd/migrations/0003_auto_20160316_0910.py b/passerelle/apps/oxyd/migrations/0003_auto_20160316_0910.py index 2329aba4..93125b97 100644 --- a/passerelle/apps/oxyd/migrations/0003_auto_20160316_0910.py +++ b/passerelle/apps/oxyd/migrations/0003_auto_20160316_0910.py @@ -14,7 +14,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='oxydsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/oxyd/migrations/0004_auto_20160407_0456.py b/passerelle/apps/oxyd/migrations/0004_auto_20160407_0456.py index 855e0ebb..b1be187f 100644 --- a/passerelle/apps/oxyd/migrations/0004_auto_20160407_0456.py +++ b/passerelle/apps/oxyd/migrations/0004_auto_20160407_0456.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='oxydsmsgateway', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/apps/oxyd/models.py b/passerelle/apps/oxyd/models.py index 46cb6afa..53844c1e 100644 --- a/passerelle/apps/oxyd/models.py +++ 
b/passerelle/apps/oxyd/models.py @@ -27,15 +27,15 @@ class OxydSMSGateway(SMSResource): ['0033688888888', "OXYD error: response is not 200"], ['0033677777777', "OXYD error: response is not 200"], ], - } + }, }, { 'response': '200', 'result': { 'err': 0, 'data': None, - } - } + }, + }, ], } URL = 'http://sms.oxyd.fr/send.php' @@ -67,8 +67,7 @@ class OxydSMSGateway(SMSResource): else: results.append(0) if any(results): - raise APIError( - 'OXYD error: some destinations failed', data=list(zip(destinations, results))) + raise APIError('OXYD error: some destinations failed', data=list(zip(destinations, results))) return None def get_sms_left(self, type='standard'): diff --git a/passerelle/apps/phonecalls/migrations/0001_initial.py b/passerelle/apps/phonecalls/migrations/0001_initial.py index acd539c0..0ed310c6 100644 --- a/passerelle/apps/phonecalls/migrations/0001_initial.py +++ b/passerelle/apps/phonecalls/migrations/0001_initial.py @@ -19,7 +19,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Call', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('callee', models.CharField(max_length=64)), ('caller', models.CharField(max_length=64)), ('start_timestamp', models.DateTimeField(auto_now_add=True)), @@ -34,13 +37,38 @@ class Migration(migrations.Migration): migrations.CreateModel( name='PhoneCalls', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('max_call_duration', models.PositiveIntegerField(default=120, 
help_text='Each hour, too long calls are closed.', verbose_name='Maximum duration of a call, in minutes.')), - ('data_retention_period', models.PositiveIntegerField(default=60, help_text='Each day, old calls are removed.', verbose_name='Data retention period, in days.')), - ('users', models.ManyToManyField(blank=True, related_name='_phonecalls_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'max_call_duration', + models.PositiveIntegerField( + default=120, + help_text='Each hour, too long calls are closed.', + verbose_name='Maximum duration of a call, in minutes.', + ), + ), + ( + 'data_retention_period', + models.PositiveIntegerField( + default=60, + help_text='Each day, old calls are removed.', + verbose_name='Data retention period, in days.', + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_phonecalls_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Phone Calls', diff --git a/passerelle/apps/phonecalls/migrations/0003_phonecalls_redirect_url.py b/passerelle/apps/phonecalls/migrations/0003_phonecalls_redirect_url.py index ab0ece68..5fdd80a5 100644 --- a/passerelle/apps/phonecalls/migrations/0003_phonecalls_redirect_url.py +++ b/passerelle/apps/phonecalls/migrations/0003_phonecalls_redirect_url.py @@ -15,6 +15,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='phonecalls', name='redirect_url', - field=models.URLField(null=True, verbose_name='URL to redirect user when notification are in the browser'), + field=models.URLField( + null=True, verbose_name='URL to redirect user when notification are in the browser' + ), ), ] diff --git a/passerelle/apps/phonecalls/models.py b/passerelle/apps/phonecalls/models.py index 4a3ed97d..0fb98484 100644 --- a/passerelle/apps/phonecalls/models.py +++ b/passerelle/apps/phonecalls/models.py @@ -30,36 +30,35 @@ class PhoneCalls(BaseResource): category = _('Telephony') max_call_duration = 
models.PositiveIntegerField( - _('Maximum duration of a call, in minutes.'), - help_text=_('Each hour, too long calls are closed.'), - default=120) + _('Maximum duration of a call, in minutes.'), + help_text=_('Each hour, too long calls are closed.'), + default=120, + ) data_retention_period = models.PositiveIntegerField( - _('Data retention period, in days.'), - help_text=_('Each day, old calls are removed.'), - default=60) + _('Data retention period, in days.'), help_text=_('Each day, old calls are removed.'), default=60 + ) redirect_url = models.URLField( - verbose_name=_('URL to redirect to or open ' - 'when using redirect or newtab parameters'), - null=True) + verbose_name=_('URL to redirect to or open ' 'when using redirect or newtab parameters'), null=True + ) class Meta: verbose_name = _('Phone Calls') - @endpoint(name='call-start', - description=_('Notify a call start'), - perm='can_access', - parameters={ - 'callee': {'description': _('Callee number'), - 'example_value': '142'}, - 'caller': {'description': _('Caller number'), - 'example_value': '0143350135'}, - 'redirect': {'description': _('Redirect browser to configured URL')}, - 'newtab': {'description': _('Open configured URL in a new tab')}, - }) + @endpoint( + name='call-start', + description=_('Notify a call start'), + perm='can_access', + parameters={ + 'callee': {'description': _('Callee number'), 'example_value': '142'}, + 'caller': {'description': _('Caller number'), 'example_value': '0143350135'}, + 'redirect': {'description': _('Redirect browser to configured URL')}, + 'newtab': {'description': _('Open configured URL in a new tab')}, + }, + ) def call_start(self, request, callee, caller, redirect=None, newtab=None, **kwargs): - existing_call = Call.objects.filter(resource=self, - callee=callee, caller=caller, - end_timestamp=None).last() + existing_call = Call.objects.filter( + resource=self, callee=callee, caller=caller, end_timestamp=None + ).last() if existing_call: existing_call.details = 
kwargs existing_call.save() @@ -82,34 +81,35 @@ class PhoneCalls(BaseResource): return render(request, 'phonecalls/start_call_newtab.html', {'redirect_url': redirect_url}) return response - @endpoint(name='call-stop', - description=_('Notify a call end'), - perm='can_access', - parameters={ - 'callee': {'description': _('Callee number'), - 'example_value': '142'}, - 'caller': {'description': _('Caller number'), - 'example_value': '0143350135'}, - }) + @endpoint( + name='call-stop', + description=_('Notify a call end'), + perm='can_access', + parameters={ + 'callee': {'description': _('Callee number'), 'example_value': '142'}, + 'caller': {'description': _('Caller number'), 'example_value': '0143350135'}, + }, + ) def call_stop(self, request, callee, caller, **kwargs): # close all current callee/caller calls data = [] - for current_call in Call.objects.filter(resource=self, - callee=callee, caller=caller, - end_timestamp=None): + for current_call in Call.objects.filter( + resource=self, callee=callee, caller=caller, end_timestamp=None + ): current_call.end_timestamp = now() current_call.save() data.append(current_call.json()) return {'data': data} - @endpoint(name='calls', - description=_('Get list of calls to a line'), - perm='can_access', - parameters={ - 'callee': {'description': _('Callee number'), - 'example_value': '142'}, - 'limit': {'description': _('Maximal number of results')}, - }) + @endpoint( + name='calls', + description=_('Get list of calls to a line'), + perm='can_access', + parameters={ + 'callee': {'description': _('Callee number'), 'example_value': '142'}, + 'limit': {'description': _('Maximal number of results')}, + }, + ) def calls(self, request, callee=None, caller=None, limit=30): calls = Call.objects.filter(resource=self) if callee: @@ -119,6 +119,7 @@ class PhoneCalls(BaseResource): def json_list(calls): return [call.json() for call in calls[:limit]] + return { 'data': { 'current': json_list(calls.filter(end_timestamp__isnull=True)), @@ 
-130,15 +131,17 @@ class PhoneCalls(BaseResource): super(PhoneCalls, self).hourly() # close unfinished long calls maximal_time = now() - timedelta(minutes=self.max_call_duration) - Call.objects.filter(resource=self, end_timestamp=None, - start_timestamp__lt=maximal_time).update(end_timestamp=now()) + Call.objects.filter(resource=self, end_timestamp=None, start_timestamp__lt=maximal_time).update( + end_timestamp=now() + ) def daily(self): super(PhoneCalls, self).daily() # remove finished old calls maximal_time = now() - timedelta(days=self.data_retention_period) - Call.objects.filter(resource=self, end_timestamp__isnull=False, - end_timestamp__lt=maximal_time).delete() + Call.objects.filter( + resource=self, end_timestamp__isnull=False, end_timestamp__lt=maximal_time + ).delete() class Call(models.Model): diff --git a/passerelle/apps/photon/migrations/0001_initial.py b/passerelle/apps/photon/migrations/0001_initial.py index c93df3e5..394dc80b 100644 --- a/passerelle/apps/photon/migrations/0001_initial.py +++ b/passerelle/apps/photon/migrations/0001_initial.py @@ -18,7 +18,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='AddressCacheModel', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('api_id', models.CharField(max_length=32, unique=True)), ('data', django.contrib.postgres.fields.jsonb.JSONField()), ('timestamp', models.DateTimeField(auto_now=True)), @@ -27,14 +30,46 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Photon', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, 
verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('service_url', models.CharField(default='https://photon.komoot.io/', help_text='Base Adresse Web Service URL', max_length=128, verbose_name='Service URL')), - ('latitude', models.FloatField(blank=True, help_text='Geographic priority for /addresses/ endpoint.', null=True, verbose_name='Latitude')), - ('longitude', models.FloatField(blank=True, help_text='Geographic priority for /addresses/ endpoint.', null=True, verbose_name='Longitude')), - ('users', models.ManyToManyField(blank=True, related_name='_photon_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'service_url', + models.CharField( + default='https://photon.komoot.io/', + help_text='Base Adresse Web Service URL', + max_length=128, + verbose_name='Service URL', + ), + ), + ( + 'latitude', + models.FloatField( + blank=True, + help_text='Geographic priority for /addresses/ endpoint.', + null=True, + verbose_name='Latitude', + ), + ), + ( + 'longitude', + models.FloatField( + blank=True, + help_text='Geographic priority for /addresses/ endpoint.', + null=True, + verbose_name='Longitude', + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_photon_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'Photon Web Service', diff --git a/passerelle/apps/solis/migrations/0001_initial.py b/passerelle/apps/solis/migrations/0001_initial.py index 07395e37..262351fc 100644 --- a/passerelle/apps/solis/migrations/0001_initial.py +++ b/passerelle/apps/solis/migrations/0001_initial.py @@ -14,15 +14,43 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Solis', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', 
max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('service_url', models.URLField(help_text='Solis API base URL', max_length=256, verbose_name='Service URL')), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'service_url', + models.URLField( + help_text='Solis API base URL', max_length=256, verbose_name='Service URL' + ), + ), ('username', models.CharField(max_length=128, verbose_name='Username')), ('password', models.CharField(max_length=128, verbose_name='Password')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_solis_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_solis_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'Solis', @@ -31,7 +59,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='SolisAPALink', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('name_id', models.CharField(max_length=256)), ('user_id', models.CharField(max_length=64)), ('code', models.CharField(max_length=64)), diff --git a/passerelle/apps/solis/migrations/0003_auto_20171219_0800.py b/passerelle/apps/solis/migrations/0003_auto_20171219_0800.py index 5077a97c..9795ed35 100644 --- 
a/passerelle/apps/solis/migrations/0003_auto_20171219_0800.py +++ b/passerelle/apps/solis/migrations/0003_auto_20171219_0800.py @@ -35,7 +35,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='solis', name='client_certificate', - field=models.FileField(help_text='Client certificate and private key (PEM format)', upload_to=passerelle.apps.solis.models.keystore_upload_to, null=True, verbose_name='Client certificate', blank=True), + field=models.FileField( + help_text='Client certificate and private key (PEM format)', + upload_to=passerelle.apps.solis.models.keystore_upload_to, + null=True, + verbose_name='Client certificate', + blank=True, + ), ), migrations.AddField( model_name='solis', @@ -45,7 +51,13 @@ class Migration(migrations.Migration): migrations.AddField( model_name='solis', name='trusted_certificate_authorities', - field=models.FileField(help_text='Trusted CAs (PEM format)', upload_to=passerelle.apps.solis.models.trusted_cas_upload_to, null=True, verbose_name='Trusted CAs', blank=True), + field=models.FileField( + help_text='Trusted CAs (PEM format)', + upload_to=passerelle.apps.solis.models.trusted_cas_upload_to, + null=True, + verbose_name='Trusted CAs', + blank=True, + ), ), migrations.AddField( model_name='solis', diff --git a/passerelle/apps/solis/migrations/0006_solisrsalink.py b/passerelle/apps/solis/migrations/0006_solisrsalink.py index b5fbdc73..44db2910 100644 --- a/passerelle/apps/solis/migrations/0006_solisrsalink.py +++ b/passerelle/apps/solis/migrations/0006_solisrsalink.py @@ -16,12 +16,18 @@ class Migration(migrations.Migration): migrations.CreateModel( name='SolisRSALink', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name_id', models.CharField(max_length=256)), ('user_id', models.CharField(max_length=64)), ('code', 
models.CharField(max_length=64)), ('text', models.CharField(max_length=256)), - ('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='solis.Solis')), + ( + 'resource', + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='solis.Solis'), + ), ], ), ] diff --git a/passerelle/apps/solis/models.py b/passerelle/apps/solis/models.py index 30ccb856..f3d29c16 100644 --- a/passerelle/apps/solis/models.py +++ b/passerelle/apps/solis/models.py @@ -32,16 +32,16 @@ from passerelle.utils.conversion import simplify, to_pdf def unflat(flatten_dict, separator='_'): - ''' + """ Expand a "flatten" dict: >>> unflat({'foo': 'bar', 'two_foo': 'one', 'two_bar': 'two'}) {'foo': 'bar', 'two': {'foo': 'one', 'bar': 'two'}} - ''' + """ dict_ = {} for key, value in flatten_dict.items(): root = dict_ keys = [x.strip() for x in key.split(separator)] - for key in keys[:-1]: # build branch + for key in keys[:-1]: # build branch if key not in root: root[key] = {} root = root[key] @@ -58,28 +58,34 @@ def trusted_cas_upload_to(instance, filename): class Solis(BaseResource): - service_url = models.URLField(max_length=256, blank=False, - verbose_name=_('Service URL'), - help_text=_('Solis API base URL')) + service_url = models.URLField( + max_length=256, blank=False, verbose_name=_('Service URL'), help_text=_('Solis API base URL') + ) - basic_auth_username = models.CharField(max_length=128, blank=True, - verbose_name=_('HTTP Basic Auth username')) - basic_auth_password = models.CharField(max_length=128, blank=True, - verbose_name=_('HTTP Basic Auth password')) + basic_auth_username = models.CharField( + max_length=128, blank=True, verbose_name=_('HTTP Basic Auth username') + ) + basic_auth_password = models.CharField( + max_length=128, blank=True, verbose_name=_('HTTP Basic Auth password') + ) - client_certificate = models.FileField(upload_to=keystore_upload_to, - null=True, blank=True, - verbose_name=_('Client certificate'), - help_text=_('Client certificate 
and private key (PEM format)')) - verify_cert = models.BooleanField(default=True, - verbose_name=_('Check HTTPS Certificate validity')) - trusted_certificate_authorities = models.FileField(upload_to=trusted_cas_upload_to, - null=True, blank=True, - verbose_name=_('Trusted CAs'), - help_text=_('Trusted CAs (PEM format)')) + client_certificate = models.FileField( + upload_to=keystore_upload_to, + null=True, + blank=True, + verbose_name=_('Client certificate'), + help_text=_('Client certificate and private key (PEM format)'), + ) + verify_cert = models.BooleanField(default=True, verbose_name=_('Check HTTPS Certificate validity')) + trusted_certificate_authorities = models.FileField( + upload_to=trusted_cas_upload_to, + null=True, + blank=True, + verbose_name=_('Trusted CAs'), + help_text=_('Trusted CAs (PEM format)'), + ) - http_proxy = models.CharField(max_length=128, blank=True, - verbose_name=_('Proxy URL')) + http_proxy = models.CharField(max_length=128, blank=True, verbose_name=_('Proxy URL')) text_template_name = 'solis/apa_user_text.txt' text_template_name_rsa = 'solis/rsa_user_text.txt' @@ -106,10 +112,11 @@ class Solis(BaseResource): json_content = response.json() except ValueError: json_content = None - raise APIError('error status:%s %r, content:%r' % - (response.status_code, response.reason, response.content[:1024]), - data={'status_code': response.status_code, - 'json_content': json_content}) + raise APIError( + 'error status:%s %r, content:%r' + % (response.status_code, response.reason, response.content[:1024]), + data={'status_code': response.status_code, 'json_content': json_content}, + ) if response.status_code == 204: # 204 No Content return None @@ -120,10 +127,10 @@ class Solis(BaseResource): raise APIError('invalid JSON content:%r' % response.content[:1024]) def check_status(self): - ''' + """ Raise an exception if something goes wrong. If OK, returns something usable by ping() endpoint. 
- ''' + """ pong = self.request('main/isAlive') try: if not pong.get('response').startswith('Solis API est op'): @@ -137,33 +144,44 @@ class Solis(BaseResource): # deprecated endpoint return self.check_status() - @endpoint(name='referential', perm='can_access', - pattern=r'^(?P[\w-]+)/(?P[\w-]+)/$', - example_pattern='{module}/{name}/', - description=_('Get module/name references'), - parameters={ - 'module': {'description': _('Referential module: asg, civi, trans'), - 'example_value': 'trans'}, - 'name': {'description': _('Referential name in this module'), - 'example_value': 'lieu'}, - 'codePays': {'example_value': '79'}, - 'codeDepartement': {'example_value': '80'}, - 'codeCommune': {'example_value': '21'}, - 'filtreSurInactivite': {'description': _('For trans/commune referential: true or false')}, - 'q': {'description': _('Returns only items whose text matches'), - 'example_value': 'claudel'}, - 'id': {'description': _('Returns only items with this id (code)')}, - 'ignore': {'description': _('Do not return items with this id, ' - 'or multiple ids separated with commas'), - 'example_value': '9999'}, - }) + @endpoint( + name='referential', + perm='can_access', + pattern=r'^(?P[\w-]+)/(?P[\w-]+)/$', + example_pattern='{module}/{name}/', + description=_('Get module/name references'), + parameters={ + 'module': {'description': _('Referential module: asg, civi, trans'), 'example_value': 'trans'}, + 'name': {'description': _('Referential name in this module'), 'example_value': 'lieu'}, + 'codePays': {'example_value': '79'}, + 'codeDepartement': {'example_value': '80'}, + 'codeCommune': {'example_value': '21'}, + 'filtreSurInactivite': {'description': _('For trans/commune referential: true or false')}, + 'q': {'description': _('Returns only items whose text matches'), 'example_value': 'claudel'}, + 'id': {'description': _('Returns only items with this id (code)')}, + 'ignore': { + 'description': _( + 'Do not return items with this id, ' 'or multiple ids separated with 
commas' + ), + 'example_value': '9999', + }, + }, + ) def referential(self, request, module, name, q=None, id=None, ignore=None, **kwargs): - if (module == 'trans' and name == 'lieu' and 'codeDepartement' in kwargs - and 'codeCommune' in kwargs and q): + if ( + module == 'trans' + and name == 'lieu' + and 'codeDepartement' in kwargs + and 'codeCommune' in kwargs + and q + ): # use optimized endpoint for trans/lieu search - endpoint = 'referentiels/trans/nomlieu/%s/%s/%s' % (kwargs.pop('codeDepartement'), - kwargs.pop('codeCommune'), q) + endpoint = 'referentiels/trans/nomlieu/%s/%s/%s' % ( + kwargs.pop('codeDepartement'), + kwargs.pop('codeCommune'), + q, + ) q = None else: endpoint = 'referentiels/%s/%s' % (module, name) @@ -173,24 +191,27 @@ class Solis(BaseResource): content = self.request(endpoint) - if (not isinstance(content, dict) or len(content) != 1 or - not isinstance(list(content.values())[0], list)): - raise APIError('response is not a dictionnary with only one key ' - 'and whose value is a list', data={'json_content': content}) + if ( + not isinstance(content, dict) + or len(content) != 1 + or not isinstance(list(content.values())[0], list) + ): + raise APIError( + 'response is not a dictionnary with only one key ' 'and whose value is a list', + data={'json_content': content}, + ) items = list(content.values())[0] if not all(isinstance(item, dict) and item.get('code') for item in items): - raise APIError('items must be dictionnaries with a "code" key', - data={'json_content': content}) + raise APIError('items must be dictionnaries with a "code" key', data={'json_content': content}) for item in items: item['id'] = item['code'] item['text'] = item.get('libelle', item['id']) if ignore: - ignore_ids = [ignore_id.strip() for ignore_id in ignore.split(',') - if ignore_id.strip()] + ignore_ids = [ignore_id.strip() for ignore_id in ignore.split(',') if ignore_id.strip()] if q: q = simplify(q) @@ -202,22 +223,23 @@ class Solis(BaseResource): if q and q not in 
simplify(item['text']): return False return True + items = filter(condition, items) return {'data': list(items)} - @endpoint(name='referential-item', perm='can_access', - pattern=r'^(?P[\w-]+)/(?P[\w-]+)/(?P[\w-]+)/$', - example_pattern='{module}/{name}/{index}/', - description=_('Get an item from module/name (available only on some referentials)'), - parameters={ - 'module': {'description': _('Referential module: asg, civi, trans'), - 'example_value': 'civi'}, - 'name': {'description': _('Referential name in this module'), - 'example_value': 'individu'}, - 'index': {'description': _('Item index number'), - 'example_value': '4273'}, - }) + @endpoint( + name='referential-item', + perm='can_access', + pattern=r'^(?P[\w-]+)/(?P[\w-]+)/(?P[\w-]+)/$', + example_pattern='{module}/{name}/{index}/', + description=_('Get an item from module/name (available only on some referentials)'), + parameters={ + 'module': {'description': _('Referential module: asg, civi, trans'), 'example_value': 'civi'}, + 'name': {'description': _('Referential name in this module'), 'example_value': 'individu'}, + 'index': {'description': _('Item index number'), 'example_value': '4273'}, + }, + ) def referential_item(self, request, module, name, index): endpoint = 'referentiels/%s/%s/%s/' % (module, name, index) content = self.request(endpoint) @@ -230,10 +252,13 @@ class Solis(BaseResource): # def apa_token(self, user_id, code): - response = self.request('asg/apa/generationJeton', data={ - 'indexIndividu': user_id, - 'codeConfidentiel': code, - }) + response = self.request( + 'asg/apa/generationJeton', + data={ + 'indexIndividu': user_id, + 'codeConfidentiel': code, + }, + ) return response.get('token') def apa_get_information(self, information, user_id=None, code=None, token=None, index=None): @@ -244,9 +269,12 @@ class Solis(BaseResource): endpoint += '/' + index return self.request(endpoint) - @endpoint(name='apa-link', methods=['post'], perm='can_access', - description=_('Create link between 
name_id and ' - 'Solis APA. Payload: name_id, user_id, code')) + @endpoint( + name='apa-link', + methods=['post'], + perm='can_access', + description=_('Create link between name_id and ' 'Solis APA. Payload: name_id, user_id, code'), + ) def apa_link(self, request): try: data = json_loads(request.body) @@ -262,16 +290,17 @@ class Solis(BaseResource): token = self.apa_token(user_id, code) # invalid credentials raise APIError here information = self.apa_get_information(information='exportDonneesIndividu', token=token) text = get_template(self.text_template_name).render(information).strip() - link, created = SolisAPALink.objects.update_or_create(resource=self, name_id=name_id, - user_id=user_id, - defaults={'code': code, - 'text': text}) - return {'data': {'user_id': user_id, - 'created': created, - 'updated': not created}} + link, created = SolisAPALink.objects.update_or_create( + resource=self, name_id=name_id, user_id=user_id, defaults={'code': code, 'text': text} + ) + return {'data': {'user_id': user_id, 'created': created, 'updated': not created}} - @endpoint(name='apa-unlink', methods=['post'], perm='can_access', - description=_('Delete a Solis APA link. Payload: name_id, user_id')) + @endpoint( + name='apa-unlink', + methods=['post'], + perm='can_access', + description=_('Delete a Solis APA link. 
Payload: name_id, user_id'), + ) def apa_unlink(self, request): try: data = json_loads(request.body) @@ -287,50 +316,57 @@ class Solis(BaseResource): SolisAPALink.objects.filter(resource=self, name_id=name_id, user_id=user_id).delete() return {'data': {'user_id': user_id, 'deleted': True}} - @endpoint(name='apa-links', perm='can_access', - description=_('List linked Solis APA users'), - parameters={ - 'name_id': { - 'description': _('user identifier'), - 'example_value': '3eb56fc' - } - }) + @endpoint( + name='apa-links', + perm='can_access', + description=_('List linked Solis APA users'), + parameters={'name_id': {'description': _('user identifier'), 'example_value': '3eb56fc'}}, + ) def apa_links(self, request, name_id): - return {'data': [{'id': link.user_id, 'text': link.text} - for link in SolisAPALink.objects.filter(resource=self, name_id=name_id)]} + return { + 'data': [ + {'id': link.user_id, 'text': link.text} + for link in SolisAPALink.objects.filter(resource=self, name_id=name_id) + ] + } - @endpoint(name='apa-user-info', perm='can_access', - description=_('Get informations about a linked Solis APA user'), - parameters={ - 'name_id': { - 'description': _('user identifier'), - 'example_value': '3eb56fc' - }, - 'user_id': { - 'description': _('Solis APA user identifier'), - 'example_value': '2345', - }, - 'information': { - 'description': _('exportDonneesIndividu, consultationDeMesDroits, ' - 'suiviDemandeEnInstruction, suiviDemandeHistorique, ' - 'propositionPlanAide, demandeUnitaire'), - 'example_value': 'consultationDeMesDroits', - }, - 'index': { - 'description': _('mandatory if information is "demandeUnitaire"'), - 'example_value': '87123' - } - }) - def apa_user_info(self, request, name_id, user_id, information='exportDonneesIndividu', - index=None): + @endpoint( + name='apa-user-info', + perm='can_access', + description=_('Get informations about a linked Solis APA user'), + parameters={ + 'name_id': {'description': _('user identifier'), 
'example_value': '3eb56fc'}, + 'user_id': { + 'description': _('Solis APA user identifier'), + 'example_value': '2345', + }, + 'information': { + 'description': _( + 'exportDonneesIndividu, consultationDeMesDroits, ' + 'suiviDemandeEnInstruction, suiviDemandeHistorique, ' + 'propositionPlanAide, demandeUnitaire' + ), + 'example_value': 'consultationDeMesDroits', + }, + 'index': { + 'description': _('mandatory if information is "demandeUnitaire"'), + 'example_value': '87123', + }, + }, + ) + def apa_user_info(self, request, name_id, user_id, information='exportDonneesIndividu', index=None): if information == 'demandeUnitaire' and index is None: raise APIError('index mandatory if information=demandeUnitaire', http_status=400) try: link = SolisAPALink.objects.get(resource=self, name_id=name_id, user_id=user_id) except SolisAPALink.DoesNotExist: raise APIError('unknown link') - response = self.apa_get_information(information=information, user_id=user_id, code=link.code, - index=index if information == 'demandeUnitaire' else None) + response = self.apa_get_information( + information=information, + user_id=user_id, + code=link.code, + index=index if information == 'demandeUnitaire' else None, + ) if information == 'exportDonneesIndividu': text = get_template(self.text_template_name).render(response).strip() if text != link.text: @@ -344,21 +380,20 @@ class Solis(BaseResource): raise APIError('cannot find indexDemande=%s in demandeAsg list' % index) return {'data': response} - @endpoint(name='apa-users', perm='can_access', - description=_('Get exportDonneesIndividu datas about all linked Solis APA users'), - parameters={ - 'name_id': { - 'description': _('user identifier'), - 'example_value': '3eb56fc' - } - }) + @endpoint( + name='apa-users', + perm='can_access', + description=_('Get exportDonneesIndividu datas about all linked Solis APA users'), + parameters={'name_id': {'description': _('user identifier'), 'example_value': '3eb56fc'}}, + ) def apa_users(self, request, 
name_id): users = [] template = get_template(self.text_template_name) for link in SolisAPALink.objects.filter(resource=self, name_id=name_id): try: - information = self.apa_get_information(information='exportDonneesIndividu', - user_id=link.user_id, code=link.code) + information = self.apa_get_information( + information='exportDonneesIndividu', user_id=link.user_id, code=link.code + ) except APIError: # don't list unknown/unlinked users continue @@ -366,14 +401,15 @@ class Solis(BaseResource): if text != link.text: link.text = text link.save() - users.append({ - 'id': link.user_id, - 'text': text, - 'information': information}) + users.append({'id': link.user_id, 'text': text, 'information': information}) return {'data': users} - @endpoint(name='apa-integration', perm='can_access', methods=['post'], - description=_('Send data to "integrationDemandeApa"')) + @endpoint( + name='apa-integration', + perm='can_access', + methods=['post'], + description=_('Send data to "integrationDemandeApa"'), + ) def apa_integration(self, request): try: payload = json_loads(request.body) @@ -437,7 +473,7 @@ class Solis(BaseResource): return { 'data': response, 'files_sent': sendfiles, - 'files_failed_pdf_conversion': files_failed_pdf_conversion + 'files_failed_pdf_conversion': files_failed_pdf_conversion, } # @@ -465,15 +501,11 @@ class Solis(BaseResource): href = 'https:' + href[5:] if not href.startswith(self.service_url): return - endpoint = href[len(self.service_url):] + endpoint = href[len(self.service_url) :] try: value = self.request(endpoint) except APIError as e: # do not raise on linked informations - value = { - 'err': 1, - 'err_class': e.__class__.__name__, - 'err_desc': force_text(e) - } + value = {'err': 1, 'err_class': e.__class__.__name__, 'err_desc': force_text(e)} link['content'] = value def rsa_get_links(self, information, links): @@ -494,8 +526,9 @@ class Solis(BaseResource): if link in information['rsa_links']: 
self.rsa_fill_with_link_content(information['rsa_links'][link]) - def rsa_get_information(self, information, user_id=None, code=None, dob=None, token=None, - index='search', links=None): + def rsa_get_information( + self, information, user_id=None, code=None, dob=None, token=None, index='search', links=None + ): # simulate "individu" referential: get user details from civi/individu/user_id if information == 'individu': if not user_id: @@ -503,8 +536,7 @@ class Solis(BaseResource): endpoint = 'referentiels/%s/%s/%s/' % ('civi', 'individu', user_id) content = self.request(endpoint) if not isinstance(content, dict): - raise APIError('civi/individu response is not a dictionnary', - data={'json_content': content}) + raise APIError('civi/individu response is not a dictionnary', data={'json_content': content}) return content if token is None: token = self.rsa_token(user_id, code, dob) @@ -525,9 +557,14 @@ class Solis(BaseResource): return information - @endpoint(name='rsa-link', methods=['post'], perm='can_access', - description=_('Create link between name_id and ' - 'Solis RSA. Payload: name_id, user_id, code, dob (optionnal)')) + @endpoint( + name='rsa-link', + methods=['post'], + perm='can_access', + description=_( + 'Create link between name_id and ' 'Solis RSA. 
Payload: name_id, user_id, code, dob (optionnal)' + ), + ) def rsa_link(self, request): try: data = json_loads(request.body) @@ -544,18 +581,17 @@ class Solis(BaseResource): self.rsa_token(user_id, code, dob) # invalid credentials raise APIError here information = self.rsa_get_information('individu', user_id, code, dob) text = get_template(self.text_template_name_rsa).render(information).strip() - link, created = SolisRSALink.objects.update_or_create(resource=self, name_id=name_id, - user_id=user_id, - defaults={'code': code, - 'dob': dob, - 'text': text}) - return {'data': {'user_id': user_id, - 'text': text, - 'created': created, - 'updated': not created}} + link, created = SolisRSALink.objects.update_or_create( + resource=self, name_id=name_id, user_id=user_id, defaults={'code': code, 'dob': dob, 'text': text} + ) + return {'data': {'user_id': user_id, 'text': text, 'created': created, 'updated': not created}} - @endpoint(name='rsa-unlink', methods=['post'], perm='can_access', - description=_('Delete a Solis RSA link. Payload: name_id, user_id')) + @endpoint( + name='rsa-unlink', + methods=['post'], + perm='can_access', + description=_('Delete a Solis RSA link. 
Payload: name_id, user_id'), + ) def rsa_unlink(self, request): try: data = json_loads(request.body) @@ -571,55 +607,62 @@ class Solis(BaseResource): SolisRSALink.objects.filter(resource=self, name_id=name_id, user_id=user_id).delete() return {'data': {'user_id': user_id, 'deleted': True}} - @endpoint(name='rsa-links', perm='can_access', - description=_('List linked Solis RSA users'), - parameters={ - 'name_id': { - 'description': _('user identifier'), - 'example_value': '3eb56fc' - } - }) + @endpoint( + name='rsa-links', + perm='can_access', + description=_('List linked Solis RSA users'), + parameters={'name_id': {'description': _('user identifier'), 'example_value': '3eb56fc'}}, + ) def rsa_links(self, request, name_id): - return {'data': [{'id': link.user_id, 'text': link.text} - for link in SolisRSALink.objects.filter(resource=self, name_id=name_id)]} + return { + 'data': [ + {'id': link.user_id, 'text': link.text} + for link in SolisRSALink.objects.filter(resource=self, name_id=name_id) + ] + } - @endpoint(name='rsa-user-info', perm='can_access', - description=_('Get informations about a linked Solis RSA user'), - parameters={ - 'name_id': { - 'description': _('user identifier'), - 'example_value': '3eb56fc' - }, - 'user_id': { - 'description': _('Solis RSA user identifier'), - 'example_value': '4273', - }, - 'information': { - 'description': _('individu, actions, allocataires, engagements, ' - 'evaluations, evenements, indus, menages, presences, rdvs'), - 'example_value': 'allocataires', - }, - 'index': { - 'description': _('get a specific item, if applicable'), - }, - 'links': { - 'description': _('get linked informations (comma separated list, empty for all)'), - 'example_value': 'etatCivil,conjoint', - }, - 'filters': { - 'description': _('filter response (list), ex: idStructure=399 or ' - 'idStructure!=399,prescriptionPlacement=Placement'), - }, - }) - def rsa_user_info(self, request, name_id, user_id, information='individu', - index='search', links=None, 
filters=None): + @endpoint( + name='rsa-user-info', + perm='can_access', + description=_('Get informations about a linked Solis RSA user'), + parameters={ + 'name_id': {'description': _('user identifier'), 'example_value': '3eb56fc'}, + 'user_id': { + 'description': _('Solis RSA user identifier'), + 'example_value': '4273', + }, + 'information': { + 'description': _( + 'individu, actions, allocataires, engagements, ' + 'evaluations, evenements, indus, menages, presences, rdvs' + ), + 'example_value': 'allocataires', + }, + 'index': { + 'description': _('get a specific item, if applicable'), + }, + 'links': { + 'description': _('get linked informations (comma separated list, empty for all)'), + 'example_value': 'etatCivil,conjoint', + }, + 'filters': { + 'description': _( + 'filter response (list), ex: idStructure=399 or ' + 'idStructure!=399,prescriptionPlacement=Placement' + ), + }, + }, + ) + def rsa_user_info( + self, request, name_id, user_id, information='individu', index='search', links=None, filters=None + ): try: link = SolisRSALink.objects.get(resource=self, name_id=name_id, user_id=user_id) except SolisRSALink.DoesNotExist: raise APIError('unknown link') - response = self.rsa_get_information(information=information, - user_id=user_id, code=link.code, dob=link.dob, - index=index, links=links) + response = self.rsa_get_information( + information=information, user_id=user_id, code=link.code, dob=link.dob, index=index, links=links + ) if information == 'individu': text = get_template(self.text_template_name_rsa).render(response).strip() if text != link.text: @@ -629,11 +672,9 @@ class Solis(BaseResource): for filter_ in filters.split(','): key, value = filter_.split('=') if key.endswith('!'): - response = [item for item in response - if str(item.get(key[:-1])) != value] + response = [item for item in response if str(item.get(key[:-1])) != value] else: - response = [item for item in response - if str(item.get(key)) == value] + response = [item for item in 
response if str(item.get(key)) == value] return {'data': response} diff --git a/passerelle/apps/sp_fr/admin.py b/passerelle/apps/sp_fr/admin.py index 595eb33f..83c166d4 100644 --- a/passerelle/apps/sp_fr/admin.py +++ b/passerelle/apps/sp_fr/admin.py @@ -27,6 +27,7 @@ class RequestAdmin(admin.ModelAdmin): def form_url(self, obj): return format_html('{0}', obj.url) + form_url.allow_tags = True diff --git a/passerelle/apps/sp_fr/fields.py b/passerelle/apps/sp_fr/fields.py index 563dd6a2..625cebdd 100644 --- a/passerelle/apps/sp_fr/fields.py +++ b/passerelle/apps/sp_fr/fields.py @@ -51,8 +51,9 @@ class VariableAndExpressionWidget(forms.MultiWidget): class VariableAndExpressionField(forms.MultiValueField): widget = VariableAndExpressionWidget - def __init__(self, choices=(), required=True, widget=None, label=None, - initial=None, help_text='', *args, **kwargs): + def __init__( + self, choices=(), required=True, widget=None, label=None, initial=None, help_text='', *args, **kwargs + ): fields = [ forms.ChoiceField(choices=choices, required=required), forms.CharField(required=False, validators=[validate_django_template]), @@ -64,7 +65,10 @@ class VariableAndExpressionField(forms.MultiValueField): label=label, initial=initial, help_text=help_text, - require_all_fields=False, *args, **kwargs) + require_all_fields=False, + *args, + **kwargs, + ) self.choices = choices def _get_choices(self): @@ -80,6 +84,7 @@ class VariableAndExpressionField(forms.MultiValueField): value = list(value) self._choices = value self.widget.widgets[0].choices = value + choices = property(_get_choices, _set_choices) def compress(self, data): diff --git a/passerelle/apps/sp_fr/forms.py b/passerelle/apps/sp_fr/forms.py index 639eea16..e1402369 100644 --- a/passerelle/apps/sp_fr/forms.py +++ b/passerelle/apps/sp_fr/forms.py @@ -20,7 +20,6 @@ from . 
import models, fields class MappingForm(forms.ModelForm): - def __init__(self, *args, **kwargs): super(MappingForm, self).__init__(*args, **kwargs) if self.instance.procedure and self.instance and self.instance.formdef: @@ -31,10 +30,8 @@ class MappingForm(forms.ModelForm): base_name = str(field.varname or i) initial = self.instance.rules.get('fields', {}).get(base_name) self.fields['field_%s' % base_name] = fields.VariableAndExpressionField( - label=label, - choices=choices, - initial=initial, - required=False) + label=label, choices=choices, initial=initial, required=False + ) def table_fields(self): return [field for field in self if field.name.startswith('field_')] @@ -54,7 +51,7 @@ class MappingForm(forms.ModelForm): continue if not self.cleaned_data[key]: continue - real_key = key[len('field_'):] + real_key = key[len('field_') :] value = self.cleaned_data[key].copy() value['label'] = self.fields[key].label fields[real_key] = value diff --git a/passerelle/apps/sp_fr/migrations/0001_initial.py b/passerelle/apps/sp_fr/migrations/0001_initial.py index 471d92f0..13dfd96f 100644 --- a/passerelle/apps/sp_fr/migrations/0001_initial.py +++ b/passerelle/apps/sp_fr/migrations/0001_initial.py @@ -22,10 +22,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Mapping', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('procedure', models.CharField(choices=[(b'DOC', 'Request for construction site opening'), (b'recensementCitoyen', 'Request for mandatory citizen census'), (b'depotDossierPACS', 'Pre-request for citizen solidarity pact')], max_length=32, unique=True, verbose_name='Procedure')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ( + 'procedure', + models.CharField( + choices=[ + (b'DOC', 'Request for construction site opening'), + (b'recensementCitoyen', 'Request for mandatory citizen census'), + 
(b'depotDossierPACS', 'Pre-request for citizen solidarity pact'), + ], + max_length=32, + unique=True, + verbose_name='Procedure', + ), + ), ('formdef', passerelle.utils.wcs.FormDefField(verbose_name='Formdef')), - ('rules', django.contrib.postgres.fields.jsonb.JSONField(default=passerelle.apps.sp_fr.models.default_rule, verbose_name='Rules')), + ( + 'rules', + django.contrib.postgres.fields.jsonb.JSONField( + default=passerelle.apps.sp_fr.models.default_rule, verbose_name='Rules' + ), + ), ], options={ 'verbose_name': 'MDEL mapping', @@ -35,12 +55,28 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Request', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')), ('modified', models.DateTimeField(auto_now=True, verbose_name='Created')), ('filename', models.CharField(max_length=128, verbose_name='Identifier')), ('archive', models.FileField(max_length=256, upload_to=b'', verbose_name='Archive')), - ('state', models.CharField(choices=[(b'received', 'Received'), (b'transfered', 'Transferred'), (b'error', 'Error'), (b'returned', 'Returned')], default=b'received', max_length=16, verbose_name='State')), + ( + 'state', + models.CharField( + choices=[ + (b'received', 'Received'), + (b'transfered', 'Transferred'), + (b'error', 'Error'), + (b'returned', 'Returned'), + ], + default=b'received', + max_length=16, + verbose_name='State', + ), + ), ('url', models.URLField(blank=True, verbose_name='URL')), ], options={ @@ -51,13 +87,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Resource', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('input_sftp', passerelle.utils.sftp.SFTPField(default=None, null=True, verbose_name='Input SFTP URL')), - ('output_sftp', passerelle.utils.sftp.SFTPField(default=None, null=True, verbose_name='Output SFTP URL')), - ('users', models.ManyToManyField(blank=True, related_name='_resource_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'input_sftp', + passerelle.utils.sftp.SFTPField(default=None, null=True, verbose_name='Input SFTP URL'), + ), + ( + 'output_sftp', + passerelle.utils.sftp.SFTPField(default=None, null=True, verbose_name='Output SFTP URL'), + ), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_resource_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Service-Public.fr', @@ -66,12 +119,19 @@ class Migration(migrations.Migration): migrations.AddField( model_name='request', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sp_fr.Resource', verbose_name='Resource'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='sp_fr.Resource', verbose_name='Resource' + ), ), migrations.AddField( model_name='mapping', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='mappings', to='sp_fr.Resource', verbose_name='Resource'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='mappings', + to='sp_fr.Resource', + verbose_name='Resource', + ), ), migrations.AlterUniqueTogether( name='request', diff --git a/passerelle/apps/sp_fr/migrations/0002_auto_20200504_1402.py b/passerelle/apps/sp_fr/migrations/0002_auto_20200504_1402.py index 4425239e..855b8284 100644 --- 
a/passerelle/apps/sp_fr/migrations/0002_auto_20200504_1402.py +++ b/passerelle/apps/sp_fr/migrations/0002_auto_20200504_1402.py @@ -15,7 +15,16 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='mapping', name='procedure', - field=models.CharField(choices=[('DOC', 'Request for construction site opening'), ('recensementCitoyen', 'Request for mandatory citizen census'), ('depotDossierPACS', 'Pre-request for citizen solidarity pact')], max_length=32, unique=True, verbose_name='Procedure'), + field=models.CharField( + choices=[ + ('DOC', 'Request for construction site opening'), + ('recensementCitoyen', 'Request for mandatory citizen census'), + ('depotDossierPACS', 'Pre-request for citizen solidarity pact'), + ], + max_length=32, + unique=True, + verbose_name='Procedure', + ), ), migrations.AlterField( model_name='request', @@ -25,6 +34,16 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='request', name='state', - field=models.CharField(choices=[('received', 'Received'), ('transfered', 'Transferred'), ('error', 'Error'), ('returned', 'Returned')], default='received', max_length=16, verbose_name='State'), + field=models.CharField( + choices=[ + ('received', 'Received'), + ('transfered', 'Transferred'), + ('error', 'Error'), + ('returned', 'Returned'), + ], + default='received', + max_length=16, + verbose_name='State', + ), ), ] diff --git a/passerelle/apps/sp_fr/models.py b/passerelle/apps/sp_fr/models.py index b0491c33..673ee917 100644 --- a/passerelle/apps/sp_fr/models.py +++ b/passerelle/apps/sp_fr/models.py @@ -55,16 +55,12 @@ PROCEDURES = [ FILE_PATTERN = re.compile(r'^(?P.*)-(?P[a-zA-Z0-9]+)-(?P\d+).zip$') ENT_PATTERN = re.compile(r'^.*-ent-\d+(?:-.*)?.xml$') -NSMAP = { - 'dgme-metier': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier' -} +NSMAP = {'dgme-metier': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier'} ROUTAGE_XPATH = ET.XPath( - 
('dgme-metier:Routage/dgme-metier:Donnee/dgme-metier:Valeur/text()'), - namespaces=NSMAP) + ('dgme-metier:Routage/dgme-metier:Donnee/dgme-metier:Valeur/text()'), namespaces=NSMAP +) -EMAIL_XPATH = ET.XPath( - ('dgme-metier:Teledemarche/dgme-metier:Email/text()'), - namespaces=NSMAP) +EMAIL_XPATH = ET.XPath(('dgme-metier:Teledemarche/dgme-metier:Email/text()'), namespaces=NSMAP) DOCUMENTS_XPATH = ET.XPath('dgme-metier:Document', namespaces=NSMAP) PIECE_JOINTE_XPATH = ET.XPath('dgme-metier:PieceJointe', namespaces=NSMAP) @@ -72,7 +68,9 @@ CODE_XPATH = ET.XPath('dgme-metier:Code', namespaces=NSMAP) FICHIER_XPATH = ET.XPath('dgme-metier:Fichier', namespaces=NSMAP) FICHIER_DONNEES_XPATH = ET.XPath('.//dgme-metier:FichierDonnees', namespaces=NSMAP) -ET.register_namespace('dgme-metier', 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier') +ET.register_namespace( + 'dgme-metier', 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier' +) def simplify(s): @@ -91,13 +89,9 @@ def simplify(s): class Resource(BaseResource): category = _('Business Process Connectors') - input_sftp = SFTPField( - verbose_name=_('Input SFTP URL'), - null=True) + input_sftp = SFTPField(verbose_name=_('Input SFTP URL'), null=True) - output_sftp = SFTPField( - verbose_name=_('Output SFTP URL'), - null=True) + output_sftp = SFTPField(verbose_name=_('Output SFTP URL'), null=True) def check_status(self): with self.input_sftp.client() as sftp: @@ -145,16 +139,20 @@ class Resource(BaseResource): for filename in helper(): m = FILE_PATTERN.match(filename) if not m: - self.logger.info('file "%s" did not match pattern %s, moving to FAILED/', - filename, FILE_PATTERN) + self.logger.info( + 'file "%s" did not match pattern %s, moving to FAILED/', filename, FILE_PATTERN + ) sftp.rename(filename, 'FAILED/' + filename) continue procedure = m.group('procedure') try: mapping = self.mappings.get(procedure=procedure) except Mapping.DoesNotExist: - self.logger.info('no mapping 
for procedure "%s" for file "%s", moving to FAILED/', - procedure, filename) + self.logger.info( + 'no mapping for procedure "%s" for file "%s", moving to FAILED/', + procedure, + filename, + ) continue handler = self.FileHandler( @@ -164,7 +162,8 @@ class Resource(BaseResource): identifier=m.group('identifier'), procedure=procedure, sequence=m.group('sequence'), - mapping=mapping) + mapping=mapping, + ) if not handler.request: count -= 1 try: @@ -204,9 +203,7 @@ class Resource(BaseResource): if not self.request: with self.sftp.open(self.filename) as fd: with transaction.atomic(): - self.request = Request.objects.create( - resource=self.resource, - filename=self.filename) + self.request = Request.objects.create(resource=self.resource, filename=self.filename) self.request.state = Request.STATE_RECEIVED self.request.archive.save(self.filename, File(fd)) if self.request.state == Request.STATE_RECEIVED: @@ -338,15 +335,13 @@ class Resource(BaseResource): with self.resource.output_sftp.client() as client: with client.open(self.request.response_zip_filename, mode='w') as fd: self.request.build_response_zip( - fd, - etat='100', - commentaire=u'Demande transmise à la collectivité') + fd, etat='100', commentaire=u'Demande transmise à la collectivité' + ) with self.resource.input_sftp.client() as client: with client.open('DONE/' + self.request.response_zip_filename, mode='w') as fd: self.request.build_response_zip( - fd, - etat='100', - commentaire=u'Demande transmise à la collectivité') + fd, etat='100', commentaire=u'Demande transmise à la collectivité' + ) def get_data(self, data, name): # prevent error in manual mapping @@ -359,7 +354,9 @@ class Resource(BaseResource): numero_permis_construire = get('doc_declarant_designation_permis_numero_permis_construire') numero_permis_amenager = get('doc_declarant_designation_permis_numero_permis_amenager') - data['type_permis'] = u'Un permis de construire' if numero_permis_construire else u'Un permis d\'aménager' + 
data['type_permis'] = ( + u'Un permis de construire' if numero_permis_construire else u'Un permis d\'aménager' + ) data['numero_permis'] = numero_permis_construire or numero_permis_amenager particulier = get('doc_declarant_identite_type_personne').strip().lower() == 'true' data['type_declarant'] = u'Un particulier' if particulier else u'Une personne morale' @@ -368,52 +365,48 @@ class Resource(BaseResource): data['prenoms'] = get('doc_declarant_identite_personne_physique_prenom') else: data['nom'] = get('doc_declarant_identite_personne_morale_representant_personne_morale_nom') - data['prenoms'] = get('doc_declarant_identite_personne_morale_representant_personne_morale_prenom') + data['prenoms'] = get( + 'doc_declarant_identite_personne_morale_representant_personne_morale_prenom' + ) mapping = { '1000': 'Monsieur', '1001': 'Madame', '1002': 'Madame et Monsieur', } if particulier: - data['civilite_particulier'] = mapping.get(get('doc_declarant_identite_personne_physique_civilite'), '') + data['civilite_particulier'] = mapping.get( + get('doc_declarant_identite_personne_physique_civilite'), '' + ) else: data['civilite_pm'] = mapping.get( - get('doc_declarant_identite_personne_morale_representant_personne_morale_civilite'), '') - data['portee'] = (u'Pour la totalité des travaux' - if get('doc_ouverture_chantier_totalite_travaux').lower().strip() == 'true' - else u'Pour une tranche des travaux') + get('doc_declarant_identite_personne_morale_representant_personne_morale_civilite'), '' + ) + data['portee'] = ( + u'Pour la totalité des travaux' + if get('doc_ouverture_chantier_totalite_travaux').lower().strip() == 'true' + else u'Pour une tranche des travaux' + ) def update_data_recensementCitoyen(self, data): def get(name): return self.get_data(data, name) - motif = ( - get('recensementcitoyen_formalite_formalitemotifcode_1') - or get('recensementcitoyen_formalite_formalitemotifcode_2') + motif = get('recensementcitoyen_formalite_formalitemotifcode_1') or get( + 
'recensementcitoyen_formalite_formalitemotifcode_2' ) - data['motif'] = { - 'RECENSEMENT': '1', - 'EXEMPTION': '2' - }[motif] + data['motif'] = {'RECENSEMENT': '1', 'EXEMPTION': '2'}[motif] if data['motif'] == '2': data['motif_exempte'] = ( u"Titulaire d'une carte d'invalidité de 80% minimum" if get('recensementcitoyen_formalite_formalitemotifcode_2') == 'INFIRME' - else u"Autre situation") + else u"Autre situation" + ) data['justificatif_exemption'] = get('pj_je') - data['double_nationalite'] = ( - 'Oui' - if get('recensementcitoyen_personne_nationalite') - else 'Non') + data['double_nationalite'] = 'Oui' if get('recensementcitoyen_personne_nationalite') else 'Non' data['residence_differente'] = ( - 'Oui' - if get('recensementcitoyen_personne_adresseresidence_localite') - else 'Non') - data['civilite'] = ( - 'Monsieur' - if get('recensementcitoyen_personne_civilite') == 'M' - else 'Madame' + 'Oui' if get('recensementcitoyen_personne_adresseresidence_localite') else 'Non' ) + data['civilite'] = 'Monsieur' if get('recensementcitoyen_personne_civilite') == 'M' else 'Madame' def get_lieu_naissance(variable, code): for idx in ['', '_1', '_2']: @@ -422,7 +415,9 @@ class Resource(BaseResource): return get(v + '_nom') data['cp_naissance'] = get_lieu_naissance('recensementcitoyen_personne_lieunaissance', 'AUTRE') - data['commune_naissance'] = get_lieu_naissance('recensementcitoyen_personne_lieunaissance', 'COMMUNE') + data['commune_naissance'] = get_lieu_naissance( + 'recensementcitoyen_personne_lieunaissance', 'COMMUNE' + ) data['justificatif_identite'] = get('pj_ji') situation_matrimoniale = get('recensementcitoyen_personne_situationfamille_situationmatrimoniale') data['situation_familiale'] = { @@ -432,11 +427,7 @@ class Resource(BaseResource): if data['situation_familiale'] == u'Autres': data['situation_familiale_precision'] = situation_matrimoniale pupille = get('recensementcitoyen_personne_situationfamille_pupille') - data['pupille'] = ( - 'Oui' - if pupille - else 
'Non' - ) + data['pupille'] = 'Oui' if pupille else 'Non' data['pupille_categorie'] = { 'NATION': u"Pupille de la nation", 'ETAT': u"Pupille de l'État", @@ -451,19 +442,21 @@ class Resource(BaseResource): data['justificatif_famille'] = get('pj_jf') data['filiation_inconnue_p1'] = not get('recensementcitoyen_filiationpere_nomfamille') data['filiation_inconnue_p2'] = not get('recensementcitoyen_filiationmere_nomfamille') - data['cp_naissance_p1'] = get_lieu_naissance('recensementcitoyen_filiationpere_lieunaissance', 'AUTRE') - data['cp_naissance_p2'] = get_lieu_naissance('recensementcitoyen_filiationmere_lieunaissance', 'AUTRE') + data['cp_naissance_p1'] = get_lieu_naissance( + 'recensementcitoyen_filiationpere_lieunaissance', 'AUTRE' + ) + data['cp_naissance_p2'] = get_lieu_naissance( + 'recensementcitoyen_filiationmere_lieunaissance', 'AUTRE' + ) data['commune_naissance_p1'] = get_lieu_naissance( - 'recensementcitoyen_filiationpere_lieunaissance', 'COMMUNE') + 'recensementcitoyen_filiationpere_lieunaissance', 'COMMUNE' + ) data['commune_naissance_p2'] = get_lieu_naissance( - 'recensementcitoyen_filiationmere_lieunaissance', 'COMMUNE') + 'recensementcitoyen_filiationmere_lieunaissance', 'COMMUNE' + ) for key in data: if key.endswith('_datenaissance') and data[key]: - data[key] = ( - datetime.datetime.strptime(data[key], '%d/%m/%Y') - .date() - .strftime('%Y-%m-%d') - ) + data[key] = datetime.datetime.strptime(data[key], '%d/%m/%Y').date().strftime('%Y-%m-%d') def update_data_depotDossierPACS(self, data): def get(name): @@ -472,17 +465,23 @@ class Resource(BaseResource): civilite_p1 = get('pacs_partenaire1_civilite') data['civilite_p1'] = 'Monsieur' if civilite_p1 == 'M' else 'Madame' data['acte_naissance_p1'] = get('pj_an') - data['identite_verifiee_p1'] = 'Oui' if get('pacs_partenaire1_titreidentiteverifie') == 'true' else 'Non' + data['identite_verifiee_p1'] = ( + 'Oui' if get('pacs_partenaire1_titreidentiteverifie') == 'true' else 'Non' + ) civilite_p2 = 
get('pacs_partenaire2_civilite') data['civilite_p2'] = 'Monsieur' if civilite_p2 == 'M' else 'Madame' data['acte_naissance_p2'] = get('pj_anp') - data['identite_verifiee_p2'] = 'Oui' if get('pacs_partenaire2_titreidentiteverifie') == 'true' else 'Non' + data['identite_verifiee_p2'] = ( + 'Oui' if get('pacs_partenaire2_titreidentiteverifie') == 'true' else 'Non' + ) data['type_convention'] = '2' if get('pacs_convention_conventionspecifique') == 'true' else '1' data['aide_materielle'] = ( - '1' if get('pacs_convention_conventiontype_aidemateriel_typeaidemateriel') == 'aideProportionnel' - else '2') + '1' + if get('pacs_convention_conventiontype_aidemateriel_typeaidemateriel') == 'aideProportionnel' + else '2' + ) data['regime'] = '1' if get('pacs_convention_conventiontype_regimepacs') == 'legal' else '2' data['convention_specifique'] = get('pj_cp') @@ -509,6 +508,7 @@ class Resource(BaseResource): # case of multiple nodes new_path = path[:-1] + [path[-1] + '_1'] yield new_path, text_content(node) + return {'_'.join(path): value for path, value in helper([tag_name(root)], root)} def export_json(self): @@ -537,28 +537,17 @@ def default_rule(): @six.python_2_unicode_compatible class Mapping(models.Model): resource = models.ForeignKey( - Resource, - verbose_name=_('Resource'), - related_name='mappings', - on_delete=models.CASCADE) + Resource, verbose_name=_('Resource'), related_name='mappings', on_delete=models.CASCADE + ) - procedure = models.CharField( - verbose_name=_('Procedure'), - choices=PROCEDURES, - unique=True, - max_length=32) + procedure = models.CharField(verbose_name=_('Procedure'), choices=PROCEDURES, unique=True, max_length=32) - formdef = FormDefField( - verbose_name=_('Formdef')) + formdef = FormDefField(verbose_name=_('Formdef')) - rules = JSONField( - verbose_name=_('Rules'), - default=default_rule) + rules = JSONField(verbose_name=_('Rules'), default=default_rule) def get_absolute_url(self): - return reverse('sp-fr-mapping-edit', kwargs=dict( - 
slug=self.resource.slug, - pk=self.pk)) + return reverse('sp-fr-mapping-edit', kwargs=dict(slug=self.resource.slug, pk=self.pk)) @property def xsd(self): @@ -638,8 +627,8 @@ class Mapping(models.Model): def __str__(self): return ugettext('Mapping from "{procedure}" to formdef "{formdef}"').format( - procedure=self.get_procedure_display(), - formdef=self.formdef.title if self.formdef else '-') + procedure=self.get_procedure_display(), formdef=self.formdef.title if self.formdef else '-' + ) def export_json(self): return { @@ -650,9 +639,9 @@ class Mapping(models.Model): @classmethod def import_json(cls, d, resource): - mapping = cls.objects.filter( - resource=resource, - procedure=d['procedure']).first() or cls(resource=resource, procedure=d['procedure']) + mapping = cls.objects.filter(resource=resource, procedure=d['procedure']).first() or cls( + resource=resource, procedure=d['procedure'] + ) mapping.formdef = d['formdef'] mapping.rules = d['rules'] mapping.save() @@ -680,36 +669,19 @@ class Request(models.Model): (STATE_RETURNED, _('Returned')), ] - resource = models.ForeignKey( - Resource, - verbose_name=_('Resource'), - on_delete=models.CASCADE) + resource = models.ForeignKey(Resource, verbose_name=_('Resource'), on_delete=models.CASCADE) - created = models.DateTimeField( - verbose_name=_('Created'), - auto_now_add=True) + created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) - modified = models.DateTimeField( - verbose_name=_('Created'), - auto_now=True) + modified = models.DateTimeField(verbose_name=_('Created'), auto_now=True) - filename = models.CharField( - verbose_name=_('Identifier'), - max_length=128) + filename = models.CharField(verbose_name=_('Identifier'), max_length=128) - archive = models.FileField( - verbose_name=_('Archive'), - max_length=256) + archive = models.FileField(verbose_name=_('Archive'), max_length=256) - state = models.CharField( - verbose_name=_('State'), - choices=STATES, - default=STATE_RECEIVED, - 
max_length=16) + state = models.CharField(verbose_name=_('State'), choices=STATES, default=STATE_RECEIVED, max_length=16) - url = models.URLField( - verbose_name=_('URL'), - blank=True) + url = models.URLField(verbose_name=_('URL'), blank=True) def delete(self, *args, **kwargs): try: @@ -735,7 +707,7 @@ class Request(models.Model): message_xml = self.message_xml ns = { 'pec': 'http://finances.gouv.fr/dgme/pec/message/v1', - 'mdel': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier' + 'mdel': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier', } return message_xml.find('.//{%(pec)s}MessageId' % ns).text.split()[1] @@ -744,7 +716,7 @@ class Request(models.Model): ns = { 'pec': 'http://finances.gouv.fr/dgme/pec/message/v1', - 'mdel': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier' + 'mdel': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier', } template = ''' @@ -786,29 +758,31 @@ class Request(models.Model): response.find('.//{%(pec)s}MessageId' % ns).text = 'RET-1-' + message_id response.find('.//{%(pec)s}RefToMessageId' % ns).text = message_id response.find('.//{%(pec)s}FlowType' % ns).text = message_xml.find('.//{%(pec)s}FlowType' % ns).text - response.find('.//{%(pec)s}Sender' % ns).extend( - message_xml.find('.//{%(pec)s}Recipient' % ns)) - response.find('.//{%(pec)s}Recipient' % ns).extend( - message_xml.find('.//{%(pec)s}Sender' % ns)) + response.find('.//{%(pec)s}Sender' % ns).extend(message_xml.find('.//{%(pec)s}Recipient' % ns)) + response.find('.//{%(pec)s}Recipient' % ns).extend(message_xml.find('.//{%(pec)s}Sender' % ns)) response.find('.//{%(pec)s}FlowType' % ns).text = message_xml.find('.//{%(pec)s}FlowType' % ns).text # Strangely the same node in the response does not have the same # namespace as the node in the request, whatever... 
response.find('.//{%(pec)s}NumeroTeledemarche' % ns).text = message_xml.find( - './/{%(mdel)s}NumeroTeledemarche' % ns).text - response.find('.//{%(pec)s}MotDePasse' % ns).text = message_xml.find('.//{%(mdel)s}MotDePasse' % ns).text + './/{%(mdel)s}NumeroTeledemarche' % ns + ).text + response.find('.//{%(pec)s}MotDePasse' % ns).text = message_xml.find( + './/{%(mdel)s}MotDePasse' % ns + ).text response.find('.//{%(pec)s}Etat' % ns).text = '100' response.find('.//{%(pec)s}Commentaire' % ns).text = u'Dossier transmis à la collectivité' return response def build_response_zip(self, fd_or_filename, etat, commentaire): with zipfile.ZipFile(fd_or_filename, 'w') as archive: - message_xml = self.build_message_xml_retour( - etat=etat, commentaire=commentaire) - archive.writestr('message.xml', - '' - + ET.tostring(message_xml, encoding='utf-8')) + message_xml = self.build_message_xml_retour(etat=etat, commentaire=commentaire) + archive.writestr( + 'message.xml', + '' + + ET.tostring(message_xml, encoding='utf-8'), + ) @property def response_zip_filename(self): @@ -819,15 +793,9 @@ class Request(models.Model): id_enveloppe = self.id_enveloppe numero_sequence = '1' - return '%s-%s-%s-%s.zip' % ( - numero_teledossier, - code_demarche, - id_enveloppe, - numero_sequence) + return '%s-%s-%s-%s.zip' % (numero_teledossier, code_demarche, id_enveloppe, numero_sequence) class Meta: verbose_name = _('MDEL request') verbose_name_plural = _('MDEL requests') - unique_together = ( - ('resource', 'filename'), - ) + unique_together = (('resource', 'filename'),) diff --git a/passerelle/apps/sp_fr/urls.py b/passerelle/apps/sp_fr/urls.py index 63867a0a..f59fdc58 100644 --- a/passerelle/apps/sp_fr/urls.py +++ b/passerelle/apps/sp_fr/urls.py @@ -19,12 +19,12 @@ from django.conf.urls import url from . 
import views management_urlpatterns = [ - url(r'^(?P[\w,-]+)/mapping/new/$', - views.MappingNew.as_view(), name='sp-fr-mapping-new'), - url(r'^(?P[\w,-]+)/mapping/(?P\d+)/$', - views.MappingEdit.as_view(), name='sp-fr-mapping-edit'), - url(r'^(?P[\w,-]+)/mapping/(?P\d+)/delete/$', - views.MappingDelete.as_view(), name='sp-fr-mapping-delete'), - url(r'^(?P[\w,-]+)/run/$', - views.run, name='sp-fr-run'), + url(r'^(?P[\w,-]+)/mapping/new/$', views.MappingNew.as_view(), name='sp-fr-mapping-new'), + url(r'^(?P[\w,-]+)/mapping/(?P\d+)/$', views.MappingEdit.as_view(), name='sp-fr-mapping-edit'), + url( + r'^(?P[\w,-]+)/mapping/(?P\d+)/delete/$', + views.MappingDelete.as_view(), + name='sp-fr-mapping-delete', + ), + url(r'^(?P[\w,-]+)/run/$', views.run, name='sp-fr-run'), ] diff --git a/passerelle/apps/sp_fr/xsd.py b/passerelle/apps/sp_fr/xsd.py index 640af5fe..503e722d 100644 --- a/passerelle/apps/sp_fr/xsd.py +++ b/passerelle/apps/sp_fr/xsd.py @@ -63,7 +63,7 @@ TYPE_CASTER = { INT: int, INTEGER: int, DATE_TIME: isodate.parse_datetime, - ANY_TYPE: lambda v: v + ANY_TYPE: lambda v: v, } @@ -79,7 +79,8 @@ class Schema(object): def visit(self, root): assert root.tag == SCHEMA assert set(root.attrib) <= set(['targetNamespace', 'elementFormDefault', 'attributeFormDefault']), ( - 'unsupported schema attributes %s' % root.attrib) + 'unsupported schema attributes %s' % root.attrib + ) self.target_namespace = root.get('targetNamespace') self.element_form_default = root.get('elementFormDefault', self.element_form_default) self.attribute_form_default = root.get('attributeFormDefault', self.attribute_form_default) @@ -213,8 +214,10 @@ class Schema(object): sequence = [] for element_node in node: - assert element_node.tag in(ELEMENT, CHOICE), ( - 'unsupported sequence with child not an element or a choice %s' % ET.tostring(element_node)) + assert element_node.tag in ( + ELEMENT, + CHOICE, + ), 'unsupported sequence with child not an element or a choice %s' % ET.tostring(element_node) 
if element_node.tag == ELEMENT: sequence.append(self.visit_element(element_node)) elif element_node.tag == CHOICE: @@ -242,8 +245,7 @@ class Schema(object): def qname_display(self, name): if name.namespace in self.reverse_nsmap: - name = '%s:%s' % (self.reverse_nsmap[name.namespace], - name.localname) + name = '%s:%s' % (self.reverse_nsmap[name.namespace], name.localname) return six.text_type(name) def paths(self): @@ -266,12 +268,14 @@ class Schema(object): else: if max_occurs > 1: for i in range(max_occurs): - yield path[:-1] + [ET.QName(name.namespace, name.localname + '_%d' % (i + 1))], xsd_type + yield path[:-1] + [ + ET.QName(name.namespace, name.localname + '_%d' % (i + 1)) + ], xsd_type yield path, xsd_type else: - for extension in ([''] - if max_occurs == 1 - else [''] + ['_%s' % i for i in list(range(1, max_occurs + 1))]): + for extension in ( + [''] if max_occurs == 1 else [''] + ['_%s' % i for i in list(range(1, max_occurs + 1))] + ): new_path = path if name and not is_type: new_path = new_path + [ET.QName(name.namespace, name.localname + extension)] @@ -308,6 +312,7 @@ class Path(object): for child in node: if child.tag == path[0]: return helper(child, path[1:]) + if root.tag != self.path[0]: return None child = helper(root, self.path[1:]) diff --git a/passerelle/apps/twilio/migrations/0001_initial.py b/passerelle/apps/twilio/migrations/0001_initial.py index 525190b1..0c6b8c4b 100644 --- a/passerelle/apps/twilio/migrations/0001_initial.py +++ b/passerelle/apps/twilio/migrations/0001_initial.py @@ -17,16 +17,36 @@ class Migration(migrations.Migration): migrations.CreateModel( name='TwilioSMSGateway', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), 
('description', models.TextField(verbose_name='Description')), - ('default_country_code', models.CharField(default='33', max_length=3, verbose_name='Default country code')), - ('default_trunk_prefix', models.CharField(default='0', max_length=2, verbose_name='Default trunk prefix')), - ('max_message_length', models.IntegerField(default=160, verbose_name='Maximum message length')), + ( + 'default_country_code', + models.CharField(default='33', max_length=3, verbose_name='Default country code'), + ), + ( + 'default_trunk_prefix', + models.CharField(default='0', max_length=2, verbose_name='Default trunk prefix'), + ), + ( + 'max_message_length', + models.IntegerField(default=160, verbose_name='Maximum message length'), + ), ('account_sid', models.CharField(max_length=64, verbose_name='Account Sid')), ('auth_token', models.CharField(max_length=64, verbose_name='Auth Token')), - ('users', models.ManyToManyField(blank=True, related_name='_twiliosmsgateway_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_twiliosmsgateway_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Twilio', diff --git a/passerelle/apps/twilio/models.py b/passerelle/apps/twilio/models.py index bad49f69..7bfaf634 100644 --- a/passerelle/apps/twilio/models.py +++ b/passerelle/apps/twilio/models.py @@ -49,15 +49,15 @@ class TwilioSMSGateway(SMSResource): ['+33688888888', "Twilio error: my error message"], ['+33677777777', "Twilio error: my error message"], ], - } + }, }, { 'status_code': 201, 'result': { 'err': 0, 'data': None, - } - } + }, + }, ], } URL = 'https://api.twilio.com/2010-04-01/Accounts' @@ -77,11 +77,7 @@ class TwilioSMSGateway(SMSResource): auth = HTTPBasicAuth(self.account_sid, self.auth_token) results = [] for dest in destinations: - params = { - 'Body': text, - 'From': sender, - 'To': dest - } + params = {'Body': text, 'From': sender, 'To': dest} try: resp = 
self.requests.post(url, params, auth=auth) except requests.RequestException as exc: @@ -92,7 +88,5 @@ class TwilioSMSGateway(SMSResource): else: results.append(0) if any(results): - raise APIError( - 'Twilio error: some destinations failed', - data=list(zip(destinations, results))) + raise APIError('Twilio error: some destinations failed', data=list(zip(destinations, results))) return None diff --git a/passerelle/apps/vivaticket/migrations/0001_initial.py b/passerelle/apps/vivaticket/migrations/0001_initial.py index 703db869..90d3a26f 100644 --- a/passerelle/apps/vivaticket/migrations/0001_initial.py +++ b/passerelle/apps/vivaticket/migrations/0001_initial.py @@ -14,14 +14,25 @@ class Migration(migrations.Migration): migrations.CreateModel( name='VivaTicket', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('url', models.URLField(verbose_name='API URL')), ('login', models.CharField(max_length=256, verbose_name='API Login')), ('password', models.CharField(max_length=256, verbose_name='API Password')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_vivaticket_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_vivaticket_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'VivaTicket', diff --git a/passerelle/apps/vivaticket/models.py b/passerelle/apps/vivaticket/models.py index 711f5647..fef38ee5 100644 --- a/passerelle/apps/vivaticket/models.py +++ b/passerelle/apps/vivaticket/models.py @@ -31,8 +31,15 @@ EVENTBOOK_SCHEMA = { "description": "", "type": "object", 
"required": [ - "id", "email", "start_datetime", "end_datetime", "event", "theme", "room", "quantity", - "form_url" + "id", + "email", + "start_datetime", + "end_datetime", + "event", + "theme", + "room", + "quantity", + "form_url", ], "properties": { "id": { @@ -124,8 +131,8 @@ EVENTBOOK_SCHEMA = { "school_level": { "description": "School Level code", "type": "string", - } - } + }, + }, } @@ -211,23 +218,23 @@ class VivaTicket(BaseResource): query['room'] = room return self.get_setting('Settings/GetThemes', **query) - @endpoint(name='school-levels', perm='can_access', methods=['get'], - description=_('Get school levels')) + @endpoint(name='school-levels', perm='can_access', methods=['get'], description=_('Get school levels')) def school_levels(self, request): return self.get_setting('Settings/GetSchoolLevel') def get_or_create_contact(self, data, name_id=None): - contact_payload = {'Civility': data.get('title', ''), - 'LastName': data.get('last_name', ''), - 'FirstName': data.get('first_name', ''), - 'SocialReason': data.get('social_reason', ''), - 'Address1': data.get('address', ''), - 'ZipCode': data.get('zipcode', ''), - 'City': data.get('city', ''), - 'Country': data.get('country', ''), - 'Email': data['email'], - 'Phone': data.get('phone', ''), - 'Mobile': data.get('mobile', '') + contact_payload = { + 'Civility': data.get('title', ''), + 'LastName': data.get('last_name', ''), + 'FirstName': data.get('first_name', ''), + 'SocialReason': data.get('social_reason', ''), + 'Address1': data.get('address', ''), + 'ZipCode': data.get('zipcode', ''), + 'City': data.get('city', ''), + 'Country': data.get('country', ''), + 'Email': data['email'], + 'Phone': data.get('phone', ''), + 'Mobile': data.get('mobile', ''), } if name_id is not None: unhashed_external_code = name_id @@ -247,19 +254,21 @@ class VivaTicket(BaseResource): # update contact data contact_data = response.json() url = urlparse.urljoin(self.url, 'Contact/Put') - response = self.requests.put(url, 
params={'id': response.json()['InternalCode']}, - json={'Key': self.get_apikey(), 'Contact': contact_payload}) + response = self.requests.put( + url, + params={'id': response.json()['InternalCode']}, + json={'Key': self.get_apikey(), 'Contact': contact_payload}, + ) return {'InternalCode': internal_code} - @endpoint(perm='can_access', description=_('Book an event'), - post={ - 'description': _('Creates a booking for an event'), - 'request_body': { - 'schema': { - 'application/json': EVENTBOOK_SCHEMA - } - } - }) + @endpoint( + perm='can_access', + description=_('Book an event'), + post={ + 'description': _('Creates a booking for an event'), + 'request_body': {'schema': {'application/json': EVENTBOOK_SCHEMA}}, + }, + ) def book(self, request, post_data, nameid=None): booking = { 'externalCode': post_data['id'], @@ -267,16 +276,18 @@ class VivaTicket(BaseResource): 'endDateTime': post_data['end_datetime'], 'comment': post_data.get('booking_comment', ''), 'contact': self.get_or_create_contact(post_data, nameid), - 'roomList': [{ - 'eventCategoryCode': post_data['event'], - 'roomCode': post_data['room'], - 'themeCode': post_data['theme'], - 'quantity': post_data['quantity'], - 'startDateTime': post_data['start_datetime'], - 'endDateTime': post_data['end_datetime'], - 'comment': post_data.get('room_comment', ''), - 'schoolLevelCode': post_data.get('school_level', '') - }] + 'roomList': [ + { + 'eventCategoryCode': post_data['event'], + 'roomCode': post_data['room'], + 'themeCode': post_data['theme'], + 'quantity': post_data['quantity'], + 'startDateTime': post_data['start_datetime'], + 'endDateTime': post_data['end_datetime'], + 'comment': post_data.get('room_comment', ''), + 'schoolLevelCode': post_data.get('school_level', ''), + } + ], } headers = {'X-Vivaticket-Form-URL': post_data['form_url']} r = self.post('Booking/Post', {'Booking': booking}, headers=headers) diff --git a/passerelle/base/__init__.py b/passerelle/base/__init__.py index 9982d5cc..05ee0a91 100644 --- 
a/passerelle/base/__init__.py +++ b/passerelle/base/__init__.py @@ -18,6 +18,7 @@ import django.apps from django.apps import apps from django.utils.module_loading import import_string + class ConnectorAppMixin(object): def get_connector_model(self): return self._connector_model @@ -47,6 +48,7 @@ class AppConfig(django.apps.AppConfig): # connectors if they have a get_connector_model() method or a model # that inherits from BaseResource. from .models import BaseResource + for app in apps.get_app_configs(): connector_model = None if hasattr(app, 'get_connector_model'): @@ -68,4 +70,5 @@ class AppConfig(django.apps.AppConfig): # custom appconfig. app.__class__.__bases__ = (ConnectorAppMixin,) + app.__class__.__bases__ + default_app_config = 'passerelle.base.AppConfig' diff --git a/passerelle/base/forms.py b/passerelle/base/forms.py index 3c16c939..5b94eaec 100644 --- a/passerelle/base/forms.py +++ b/passerelle/base/forms.py @@ -11,8 +11,9 @@ class ApiUserForm(forms.ModelForm): class AccessRightForm(forms.ModelForm): - confirm_open_access = forms.BooleanField(label=_('Allow open access'), required=False, - widget=forms.HiddenInput()) + confirm_open_access = forms.BooleanField( + label=_('Allow open access'), required=False, widget=forms.HiddenInput() + ) class Meta: model = AccessRight @@ -25,9 +26,13 @@ class AccessRightForm(forms.ModelForm): def add_confirmation_checkbox(self): self.add_error(None, _('Selected user has no security.')) - self.add_error('confirm_open_access', - _('Check this box if you are sure you want to allow unauthenticated access to ' - 'endpoints. Otherwise, select a different API User.')) + self.add_error( + 'confirm_open_access', + _( + 'Check this box if you are sure you want to allow unauthenticated access to ' + 'endpoints. Otherwise, select a different API User.' 
+ ), + ) self.fields['confirm_open_access'].widget = forms.CheckboxInput() @property diff --git a/passerelle/base/management/commands/cron.py b/passerelle/base/management/commands/cron.py index 9fccc1c0..34b89c3c 100644 --- a/passerelle/base/management/commands/cron.py +++ b/passerelle/base/management/commands/cron.py @@ -27,13 +27,23 @@ class Command(BaseCommand): help = 'Execute scheduled commands' def add_arguments(self, parser): - parser.add_argument('frequency', metavar='FREQUENCY', type=str, - help='hourly/daily/weekly/monthly/availability/jobs') - parser.add_argument('--connector', dest='connector', metavar='CONNECTOR', type=str, - help='limit updates to given connector type') - parser.add_argument('--connector-slug', dest='slug', metavar='SLUG', type=str, - help='limit updates to given connector slug') - + parser.add_argument( + 'frequency', metavar='FREQUENCY', type=str, help='hourly/daily/weekly/monthly/availability/jobs' + ) + parser.add_argument( + '--connector', + dest='connector', + metavar='CONNECTOR', + type=str, + help='limit updates to given connector type', + ) + parser.add_argument( + '--connector-slug', + dest='slug', + metavar='SLUG', + type=str, + help='limit updates to given connector slug', + ) def handle(self, frequency, **options): if frequency not in ('hourly', 'daily', 'weekly', 'monthly', 'availability', 'jobs'): @@ -48,14 +58,23 @@ class Command(BaseCommand): try: getattr(connector, frequency)() except Exception as e: - connector.logger.exception('connector "%s.%s" error running %s job' % ( - connector.get_connector_slug(), connector.slug, frequency)) - errors.append({'connector': connector, 'exception': e, 'traceback': traceback.format_exc()}) + connector.logger.exception( + 'connector "%s.%s" error running %s job' + % (connector.get_connector_slug(), connector.slug, frequency) + ) + errors.append( + {'connector': connector, 'exception': e, 'traceback': traceback.format_exc()} + ) if errors: for error in errors: if 
options['verbosity'] >= 1: - print(repr(error['connector']),) - print(' url:', getattr(settings, 'SITE_BASE_URL', '') + error['connector'].get_absolute_url()) + print( + repr(error['connector']), + ) + print( + ' url:', + getattr(settings, 'SITE_BASE_URL', '') + error['connector'].get_absolute_url(), + ) print(' error:', error['exception']) if options['verbosity'] >= 2: print(' traceback:') diff --git a/passerelle/base/management/commands/ensure_jsonb.py b/passerelle/base/management/commands/ensure_jsonb.py index 39c30945..1d7fed1b 100644 --- a/passerelle/base/management/commands/ensure_jsonb.py +++ b/passerelle/base/management/commands/ensure_jsonb.py @@ -37,7 +37,7 @@ class Command(BaseCommand): params = { "schema_name": line[0], 'table_name': table_name, - 'column_name': column_name + 'column_name': column_name, } try: cursor.execute(alter_query % params) diff --git a/passerelle/base/management/commands/export_site.py b/passerelle/base/management/commands/export_site.py index ab1a6102..d5fbd2d1 100644 --- a/passerelle/base/management/commands/export_site.py +++ b/passerelle/base/management/commands/export_site.py @@ -10,10 +10,10 @@ class Command(BaseCommand): help = 'Export the site' def add_arguments(self, parser): - parser.add_argument('--slugs', nargs='+', default=None, - help='specify resources to export') - parser.add_argument('--output', metavar='FILE', default=None, - help='name of a file to write output to') + parser.add_argument('--slugs', nargs='+', default=None, help='specify resources to export') + parser.add_argument( + '--output', metavar='FILE', default=None, help='name of a file to write output to' + ) def handle(self, *args, **options): if options['output']: diff --git a/passerelle/base/management/commands/import_site.py b/passerelle/base/management/commands/import_site.py index 69947cd5..aea03c15 100644 --- a/passerelle/base/management/commands/import_site.py +++ b/passerelle/base/management/commands/import_site.py @@ -9,21 +9,24 @@ class 
Command(BaseCommand): help = 'Import an exported site' def add_arguments(self, parser): - parser.add_argument('filename', metavar='FILENAME', type=str, - help='name of file to import') - parser.add_argument('--clean', action='store_true', default=False, - help='Clean site before importing') - parser.add_argument('--import-users', action='store_true', default=False, - help='Import users and access rights') - parser.add_argument('--if-empty', action='store_true', default=False, - help='Import only if passerelle is empty') - parser.add_argument('--overwrite', action='store_true', default=False, - help='Overwrite existing resources') + parser.add_argument('filename', metavar='FILENAME', type=str, help='name of file to import') + parser.add_argument('--clean', action='store_true', default=False, help='Clean site before importing') + parser.add_argument( + '--import-users', action='store_true', default=False, help='Import users and access rights' + ) + parser.add_argument( + '--if-empty', action='store_true', default=False, help='Import only if passerelle is empty' + ) + parser.add_argument( + '--overwrite', action='store_true', default=False, help='Overwrite existing resources' + ) def handle(self, filename, **options): with open(filename) as f: - import_site(json.load(f), - if_empty=options['if_empty'], - clean=options['clean'], - overwrite=options['overwrite'], - import_users=options['import_users']) + import_site( + json.load(f), + if_empty=options['if_empty'], + clean=options['clean'], + overwrite=options['overwrite'], + import_users=options['import_users'], + ) diff --git a/passerelle/base/migrations/0001_initial.py b/passerelle/base/migrations/0001_initial.py index 67fa3084..7160fb49 100644 --- a/passerelle/base/migrations/0001_initial.py +++ b/passerelle/base/migrations/0001_initial.py @@ -14,7 +14,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='AccessRight', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, 
auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('codename', models.CharField(max_length=100, verbose_name=b'codename')), ('resource_pk', models.PositiveIntegerField()), ], @@ -26,27 +29,44 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ApiUser', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('username', models.CharField(max_length=50, verbose_name='Username')), ('fullname', models.CharField(max_length=50, verbose_name='Full Name')), ('description', models.TextField(verbose_name='Description', blank=True)), - ('keytype', models.CharField(blank=True, max_length=4, verbose_name='Key Type', choices=[(b'API', b'API Key'), (b'SIGN', b'HMAC Signature')])), + ( + 'keytype', + models.CharField( + blank=True, + max_length=4, + verbose_name='Key Type', + choices=[(b'API', b'API Key'), (b'SIGN', b'HMAC Signature')], + ), + ), ('key', models.CharField(max_length=256, verbose_name='Key', blank=True)), - ('ipsource', models.GenericIPAddressField(unpack_ipv4=True, null=True, verbose_name='IP Address', blank=True)), + ( + 'ipsource', + models.GenericIPAddressField( + unpack_ipv4=True, null=True, verbose_name='IP Address', blank=True + ), + ), ], - options={ - }, + options={}, bases=(models.Model,), ), migrations.CreateModel( name='TemplateVar', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('name', models.CharField(max_length=64)), ('value', models.CharField(max_length=128)), ], - options={ - }, + options={}, bases=(models.Model,), ), migrations.AddField( diff --git a/passerelle/base/migrations/0005_resourcelog.py 
b/passerelle/base/migrations/0005_resourcelog.py index b1eac949..4775831b 100644 --- a/passerelle/base/migrations/0005_resourcelog.py +++ b/passerelle/base/migrations/0005_resourcelog.py @@ -15,7 +15,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ResourceLog', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('timestamp', models.DateTimeField(auto_now_add=True)), ('appname', models.CharField(max_length=128, null=True, verbose_name=b'appname')), ('slug', models.CharField(max_length=128, null=True, verbose_name=b'slug')), diff --git a/passerelle/base/migrations/0006_resourcestatus.py b/passerelle/base/migrations/0006_resourcestatus.py index 0eb064e5..03397282 100644 --- a/passerelle/base/migrations/0006_resourcestatus.py +++ b/passerelle/base/migrations/0006_resourcestatus.py @@ -15,10 +15,20 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ResourceStatus', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('resource_pk', models.PositiveIntegerField()), ('start_timestamp', models.DateTimeField(auto_now_add=True)), - ('status', models.CharField(default=b'unknown', max_length=20, choices=[(b'unknown', 'Unknown'), (b'up', 'Up'), (b'down', 'Down')])), + ( + 'status', + models.CharField( + default=b'unknown', + max_length=20, + choices=[(b'unknown', 'Unknown'), (b'up', 'Up'), (b'down', 'Down')], + ), + ), ('message', models.CharField(max_length=500, blank=True)), ('resource_type', models.ForeignKey(to='contenttypes.ContentType', on_delete=models.CASCADE)), ], diff --git a/passerelle/base/migrations/0007_loggingparameters.py b/passerelle/base/migrations/0007_loggingparameters.py index 
5cd96e45..3981731e 100644 --- a/passerelle/base/migrations/0007_loggingparameters.py +++ b/passerelle/base/migrations/0007_loggingparameters.py @@ -17,10 +17,32 @@ class Migration(migrations.Migration): migrations.CreateModel( name='LoggingParameters', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('resource_pk', models.PositiveIntegerField()), - ('log_level', models.CharField(choices=[(b'DEBUG', 'Debug'), (b'INFO', 'Info'), (b'WARNING', 'Warning'), (b'ERROR', 'Error'), (b'CRITICAL', 'Critical')], default=b'INFO', max_length=10, verbose_name='Log level')), - ('resource_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), + ( + 'log_level', + models.CharField( + choices=[ + (b'DEBUG', 'Debug'), + (b'INFO', 'Info'), + (b'WARNING', 'Warning'), + (b'ERROR', 'Error'), + (b'CRITICAL', 'Critical'), + ], + default=b'INFO', + max_length=10, + verbose_name='Log level', + ), + ), + ( + 'resource_type', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType' + ), + ), ], ), ] diff --git a/passerelle/base/migrations/0008_auto_20181118_0717.py b/passerelle/base/migrations/0008_auto_20181118_0717.py index c74c04b9..d5ee4160 100644 --- a/passerelle/base/migrations/0008_auto_20181118_0717.py +++ b/passerelle/base/migrations/0008_auto_20181118_0717.py @@ -14,8 +14,8 @@ def set_logging_parameters(apps, schema_editor): content_type = ContentType.objects.get_for_model(model) for instance in model.objects.all(): parameters, created = LoggingParameters.objects.get_or_create( - resource_type=content_type, - resource_pk=instance.id) + resource_type=content_type, resource_pk=instance.id + ) parameters.log_level = instance.log_level if parameters.log_level == 'NOTSET': parameters.log_level = 'INFO' diff --git 
a/passerelle/base/migrations/0010_loggingparameters_trace_emails.py b/passerelle/base/migrations/0010_loggingparameters_trace_emails.py index 0ac9a49a..40634f24 100644 --- a/passerelle/base/migrations/0010_loggingparameters_trace_emails.py +++ b/passerelle/base/migrations/0010_loggingparameters_trace_emails.py @@ -15,6 +15,10 @@ class Migration(migrations.Migration): migrations.AddField( model_name='loggingparameters', name='trace_emails', - field=models.TextField(blank=True, help_text='One address per line (empty for site administrators)', verbose_name='Emails to receive error and critical traces'), + field=models.TextField( + blank=True, + help_text='One address per line (empty for site administrators)', + verbose_name='Emails to receive error and critical traces', + ), ), ] diff --git a/passerelle/base/migrations/0011_auto_20190205_1126.py b/passerelle/base/migrations/0011_auto_20190205_1126.py index 7e823e9e..9b1a20ee 100644 --- a/passerelle/base/migrations/0011_auto_20190205_1126.py +++ b/passerelle/base/migrations/0011_auto_20190205_1126.py @@ -17,12 +17,25 @@ class Migration(migrations.Migration): migrations.CreateModel( name='AvailabilityParameters', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('resource_pk', models.PositiveIntegerField()), - ('run_check', models.BooleanField(default=True, - verbose_name='Run regular availability checks', - help_text='Run an availability check every 5 minutes')), - ('resource_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), + ( + 'run_check', + models.BooleanField( + default=True, + verbose_name='Run regular availability checks', + help_text='Run an availability check every 5 minutes', + ), + ), + ( + 'resource_type', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, 
to='contenttypes.ContentType' + ), + ), ], ), migrations.AlterUniqueTogether( diff --git a/passerelle/base/migrations/0012_job.py b/passerelle/base/migrations/0012_job.py index 41f4e5b8..5be1de1d 100644 --- a/passerelle/base/migrations/0012_job.py +++ b/passerelle/base/migrations/0012_job.py @@ -18,7 +18,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Job', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('resource_pk', models.PositiveIntegerField()), ('method_name', models.CharField(max_length=50)), ('natural_id', models.CharField(blank=True, max_length=256, null=True)), @@ -26,9 +29,26 @@ class Migration(migrations.Migration): ('creation_timestamp', models.DateTimeField(auto_now_add=True)), ('update_timestamp', models.DateTimeField(auto_now=True)), ('done_timestamp', models.DateTimeField(null=True)), - ('status', models.CharField(choices=[(b'registered', 'Registered'), (b'running', 'Running'), (b'failed', 'Failed'), (b'completed', 'Completed')], default=b'registered', max_length=20)), + ( + 'status', + models.CharField( + choices=[ + (b'registered', 'Registered'), + (b'running', 'Running'), + (b'failed', 'Failed'), + (b'completed', 'Completed'), + ], + default=b'registered', + max_length=20, + ), + ), ('status_details', django.contrib.postgres.fields.jsonb.JSONField(default={})), - ('resource_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), + ( + 'resource_type', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType' + ), + ), ], ), ] diff --git a/passerelle/base/migrations/0014_auto_20190820_0914.py b/passerelle/base/migrations/0014_auto_20190820_0914.py index bbc2e8af..89724bdc 100644 --- a/passerelle/base/migrations/0014_auto_20190820_0914.py +++ 
b/passerelle/base/migrations/0014_auto_20190820_0914.py @@ -16,7 +16,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='availabilityparameters', name='notification_delays', - field=models.TextField(default=b'0', verbose_name='Notification delays', help_text='Increasing delay between error notifications in minutes, ex.: 0,5,10', validators=[passerelle.base.models.validate_notification_delays]), + field=models.TextField( + default=b'0', + verbose_name='Notification delays', + help_text='Increasing delay between error notifications in minutes, ex.: 0,5,10', + validators=[passerelle.base.models.validate_notification_delays], + ), ), migrations.AddField( model_name='resourcestatus', diff --git a/passerelle/base/migrations/0016_auto_20191002_1443.py b/passerelle/base/migrations/0016_auto_20191002_1443.py index ec1c24de..0d7114e6 100644 --- a/passerelle/base/migrations/0016_auto_20191002_1443.py +++ b/passerelle/base/migrations/0016_auto_20191002_1443.py @@ -15,11 +15,19 @@ class Migration(migrations.Migration): migrations.AddField( model_name='loggingparameters', name='requests_max_size', - field=models.PositiveIntegerField(default=5000, help_text='Maximum HTTP request size to log', verbose_name='Requests maximum size'), + field=models.PositiveIntegerField( + default=5000, + help_text='Maximum HTTP request size to log', + verbose_name='Requests maximum size', + ), ), migrations.AddField( model_name='loggingparameters', name='responses_max_size', - field=models.PositiveIntegerField(default=5000, help_text='Maximum HTTP reponse size to log', verbose_name='Responses maximum size'), + field=models.PositiveIntegerField( + default=5000, + help_text='Maximum HTTP reponse size to log', + verbose_name='Responses maximum size', + ), ), ] diff --git a/passerelle/base/migrations/0017_auto_20200310_1806.py b/passerelle/base/migrations/0017_auto_20200310_1806.py index af0b5916..96d7a04a 100644 --- a/passerelle/base/migrations/0017_auto_20200310_1806.py +++ 
b/passerelle/base/migrations/0017_auto_20200310_1806.py @@ -22,22 +22,52 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='apiuser', name='keytype', - field=models.CharField(blank=True, choices=[('API', 'API Key'), ('SIGN', 'HMAC Signature')], max_length=4, verbose_name='Key Type'), + field=models.CharField( + blank=True, + choices=[('API', 'API Key'), ('SIGN', 'HMAC Signature')], + max_length=4, + verbose_name='Key Type', + ), ), migrations.AlterField( model_name='availabilityparameters', name='notification_delays', - field=models.TextField(default='0', help_text='Increasing delay between error notifications in minutes, ex.: 0,5,10', validators=[passerelle.base.models.validate_notification_delays], verbose_name='Notification delays'), + field=models.TextField( + default='0', + help_text='Increasing delay between error notifications in minutes, ex.: 0,5,10', + validators=[passerelle.base.models.validate_notification_delays], + verbose_name='Notification delays', + ), ), migrations.AlterField( model_name='job', name='status', - field=models.CharField(choices=[('registered', 'Registered'), ('running', 'Running'), ('failed', 'Failed'), ('completed', 'Completed')], default='registered', max_length=20), + field=models.CharField( + choices=[ + ('registered', 'Registered'), + ('running', 'Running'), + ('failed', 'Failed'), + ('completed', 'Completed'), + ], + default='registered', + max_length=20, + ), ), migrations.AlterField( model_name='loggingparameters', name='log_level', - field=models.CharField(choices=[('DEBUG', 'Debug'), ('INFO', 'Info'), ('WARNING', 'Warning'), ('ERROR', 'Error'), ('CRITICAL', 'Critical')], default='INFO', max_length=10, verbose_name='Log level'), + field=models.CharField( + choices=[ + ('DEBUG', 'Debug'), + ('INFO', 'Info'), + ('WARNING', 'Warning'), + ('ERROR', 'Error'), + ('CRITICAL', 'Critical'), + ], + default='INFO', + max_length=10, + verbose_name='Log level', + ), ), migrations.AlterField( 
model_name='resourcelog', @@ -67,6 +97,10 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='resourcestatus', name='status', - field=models.CharField(choices=[('unknown', 'Unknown'), ('up', 'Up'), ('down', 'Down')], default='unknown', max_length=20), + field=models.CharField( + choices=[('unknown', 'Unknown'), ('up', 'Up'), ('down', 'Down')], + default='unknown', + max_length=20, + ), ), ] diff --git a/passerelle/base/migrations/0018_smslog.py b/passerelle/base/migrations/0018_smslog.py index 1e4b26c4..1f6fc899 100644 --- a/passerelle/base/migrations/0018_smslog.py +++ b/passerelle/base/migrations/0018_smslog.py @@ -15,7 +15,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='SMSLog', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('timestamp', models.DateTimeField(auto_now_add=True)), ('appname', models.CharField(max_length=128, null=True, verbose_name='appname')), ('slug', models.CharField(max_length=128, null=True, verbose_name='slug')), diff --git a/passerelle/base/migrations/0022_auto_20200715_1033.py b/passerelle/base/migrations/0022_auto_20200715_1033.py index a3d2fcbd..d6c8d392 100644 --- a/passerelle/base/migrations/0022_auto_20200715_1033.py +++ b/passerelle/base/migrations/0022_auto_20200715_1033.py @@ -15,6 +15,16 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='job', name='status', - field=models.CharField(choices=[('registered', 'Registered'), ('running', 'Running'), ('failed', 'Failed'), ('restarted', 'Failed and restarted'), ('completed', 'Completed')], default='registered', max_length=20), + field=models.CharField( + choices=[ + ('registered', 'Registered'), + ('running', 'Running'), + ('failed', 'Failed'), + ('restarted', 'Failed and restarted'), + ('completed', 'Completed'), + ], + 
default='registered', + max_length=20, + ), ), ] diff --git a/passerelle/base/migrations/0023_loggingparameters_log_retention_days.py b/passerelle/base/migrations/0023_loggingparameters_log_retention_days.py index f2b0d4d8..fcd0fe0a 100644 --- a/passerelle/base/migrations/0023_loggingparameters_log_retention_days.py +++ b/passerelle/base/migrations/0023_loggingparameters_log_retention_days.py @@ -15,6 +15,11 @@ class Migration(migrations.Migration): migrations.AddField( model_name='loggingparameters', name='log_retention_days', - field=models.PositiveIntegerField(blank=True, help_text='Number of days to keep logs', null=True, verbose_name='Log retention days'), + field=models.PositiveIntegerField( + blank=True, + help_text='Number of days to keep logs', + null=True, + verbose_name='Log retention days', + ), ), ] diff --git a/passerelle/base/migrations/0024_auto_20201103_1256.py b/passerelle/base/migrations/0024_auto_20201103_1256.py index 62d9d4d0..89412a48 100644 --- a/passerelle/base/migrations/0024_auto_20201103_1256.py +++ b/passerelle/base/migrations/0024_auto_20201103_1256.py @@ -15,11 +15,21 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='loggingparameters', name='requests_max_size', - field=models.PositiveIntegerField(blank=True, help_text='Maximum HTTP request size to log', null=True, verbose_name='Requests maximum size'), + field=models.PositiveIntegerField( + blank=True, + help_text='Maximum HTTP request size to log', + null=True, + verbose_name='Requests maximum size', + ), ), migrations.AlterField( model_name='loggingparameters', name='responses_max_size', - field=models.PositiveIntegerField(blank=True, help_text='Maximum HTTP reponse size to log', null=True, verbose_name='Responses maximum size'), + field=models.PositiveIntegerField( + blank=True, + help_text='Maximum HTTP reponse size to log', + null=True, + verbose_name='Responses maximum size', + ), ), ] diff --git a/passerelle/base/migrations/0026_transaction_id.py 
b/passerelle/base/migrations/0026_transaction_id.py index 3443b833..d7fbf93c 100644 --- a/passerelle/base/migrations/0026_transaction_id.py +++ b/passerelle/base/migrations/0026_transaction_id.py @@ -12,7 +12,6 @@ class Migration(migrations.Migration): operations = [ migrations.RunSQL( - ["UPDATE base_resourcelog SET transaction_id=(extra->>'transaction_id')::uuid"], - reverse_sql=[] + ["UPDATE base_resourcelog SET transaction_id=(extra->>'transaction_id')::uuid"], reverse_sql=[] ), ] diff --git a/passerelle/base/mixins.py b/passerelle/base/mixins.py index dfddc853..05c19b86 100644 --- a/passerelle/base/mixins.py +++ b/passerelle/base/mixins.py @@ -43,5 +43,3 @@ class ResourceChildViewMixin(object): def get_success_url(self): return self.resource.get_absolute_url() - - diff --git a/passerelle/base/models.py b/passerelle/base/models.py index 5e49687c..1cc5f24b 100644 --- a/passerelle/base/models.py +++ b/passerelle/base/models.py @@ -54,29 +54,34 @@ LOGLEVEL_CHOICES = ( ('CRITICAL', _('Critical')), ) -BASE_EXPORT_FIELDS = (models.TextField, models.CharField, models.SlugField, - models.URLField, models.BooleanField, models.IntegerField, - models.CommaSeparatedIntegerField, models.EmailField, - models.IntegerField, models.PositiveIntegerField, JSONField, - models.FloatField) +BASE_EXPORT_FIELDS = ( + models.TextField, + models.CharField, + models.SlugField, + models.URLField, + models.BooleanField, + models.IntegerField, + models.CommaSeparatedIntegerField, + models.EmailField, + models.IntegerField, + models.PositiveIntegerField, + JSONField, + models.FloatField, +) @six.python_2_unicode_compatible class ApiUser(models.Model): - username = models.CharField(max_length=128, - verbose_name=_('Username'), - unique=True) - fullname = models.CharField(max_length=50, - verbose_name=_('Full Name')) - description = models.TextField(blank=True, - verbose_name=_('Description')) + username = models.CharField(max_length=128, verbose_name=_('Username'), unique=True) + fullname = 
models.CharField(max_length=50, verbose_name=_('Full Name')) + description = models.TextField(blank=True, verbose_name=_('Description')) - keytype = models.CharField(max_length=4, choices=KEYTYPE_CHOICES, - blank=True, verbose_name=_('Key Type')) + keytype = models.CharField(max_length=4, choices=KEYTYPE_CHOICES, blank=True, verbose_name=_('Key Type')) key = models.CharField(max_length=256, blank=True, verbose_name=_('Key')) - ipsource = models.GenericIPAddressField(blank=True, null=True, unpack_ipv4=True, - verbose_name=_('IP Address')) + ipsource = models.GenericIPAddressField( + blank=True, null=True, unpack_ipv4=True, verbose_name=_('IP Address') + ) def __str__(self): return u'%s <%s>' % (self.fullname, self.username) @@ -112,21 +117,20 @@ class ApiUser(models.Model): class InheritanceManager(ModelUtilsInheritanceManager): - def get_slug(self, slug, request=None): - ''' + """ Returns a resource by its slug Request based access control, if request is present - ''' + """ resource = self.get_subclass(slug=slug) if request and not resource.is_accessible_by(request): raise PermissionDenied return resource def filter_apiuser(self, apiuser): - ''' + """ Returns all resources accessible by apiuser - ''' + """ return self.filter(Q(users=None) | Q(users=apiuser)) @@ -160,14 +164,16 @@ class BaseResource(models.Model): return self.title def get_css_class_name(self): - category = self.category if not hasattr(self.category, '_proxy____args') else self.category._proxy____args[0] + category = ( + self.category if not hasattr(self.category, '_proxy____args') else self.category._proxy____args[0] + ) return "%s %s" % (slugify(category), self._meta.model_name) def is_accessible_by(self, request): if request.user.is_superuser: return True restricted = self.users.all() - return not restricted or request.apiuser in restricted + return not restricted or request.apiuser in restricted @classmethod def is_enabled(cls): @@ -179,18 +185,15 @@ class BaseResource(models.Model): 
@classmethod def get_manager_form_class(cls, **kwargs): - ''' + """ Return the class to use for new/edit connector forms. - ''' - form_class = modelform_factory( - cls, - form=cls.manager_form_base_class, - **kwargs) + """ + form_class = modelform_factory(cls, form=cls.manager_form_base_class, **kwargs) for field in form_class.base_fields.values(): if isinstance(field.widget, ClearableFileInput): - field.widget.template_with_initial = ''\ - '%(initial_text)s: %(initial)s '\ - '%(clear_template)s
%(input_text)s: %(input)s' + field.widget.template_with_initial = ( + '' '%(initial_text)s: %(initial)s ' '%(clear_template)s
%(input_text)s: %(input)s' + ) return form_class @property @@ -203,13 +206,9 @@ class BaseResource(models.Model): def logging_parameters(self): resource_type = ContentType.objects.get_for_model(self) try: - return LoggingParameters.objects.get( - resource_type=resource_type, - resource_pk=self.id) + return LoggingParameters.objects.get(resource_type=resource_type, resource_pk=self.id) except LoggingParameters.DoesNotExist: - return LoggingParameters( - resource_type=resource_type, - resource_pk=self.id) + return LoggingParameters(resource_type=resource_type, resource_pk=self.id) @property def log_level(self): @@ -224,13 +223,9 @@ class BaseResource(models.Model): def availability_parameters(self): resource_type = ContentType.objects.get_for_model(self) try: - return AvailabilityParameters.objects.get( - resource_type=resource_type, - resource_pk=self.id) + return AvailabilityParameters.objects.get(resource_type=resource_type, resource_pk=self.id) except AvailabilityParameters.DoesNotExist: - return AvailabilityParameters( - resource_type=resource_type, - resource_pk=self.id) + return AvailabilityParameters(resource_type=resource_type, resource_pk=self.id) def soap_client(self, **kwargs): return passerelle.utils.soap.SOAPClient(resource=self, **kwargs) @@ -244,20 +239,17 @@ class BaseResource(models.Model): return cls._meta.app_label.replace('_', '-') def get_absolute_url(self): - return reverse('view-connector', - kwargs={'connector': self.get_connector_slug(), 'slug': self.slug}) + return reverse('view-connector', kwargs={'connector': self.get_connector_slug(), 'slug': self.slug}) @classmethod def get_add_url(cls): return reverse('create-connector', kwargs={'connector': cls.get_connector_slug()}) def get_edit_url(self): - return reverse('edit-connector', - kwargs={'connector': self.get_connector_slug(), 'slug': self.slug}) + return reverse('edit-connector', kwargs={'connector': self.get_connector_slug(), 'slug': self.slug}) def get_delete_url(self): - return 
reverse('delete-connector', - kwargs={'connector': self.get_connector_slug(), 'slug': self.slug}) + return reverse('delete-connector', kwargs={'connector': self.get_connector_slug(), 'slug': self.slug}) def get_description_fields(self): fields = [] @@ -295,7 +287,15 @@ class BaseResource(models.Model): endpoint_info = copy.copy(method.endpoint_info) endpoint_info.http_method = http_method endpoints.append(endpoint_info) - endpoints.sort(key=lambda x: (x.display_category_order, x.display_category, x.display_order or 99999999, x.name or '', x.pattern or '')) + endpoints.sort( + key=lambda x: ( + x.display_category_order, + x.display_category, + x.display_order or 99999999, + x.name or '', + x.pattern or '', + ) + ) if hasattr(self, 'queries'): self.append_custom_queries(endpoints) return endpoints @@ -310,21 +310,23 @@ class BaseResource(models.Model): for endpoint_info in self.get_endpoints_infos(): permission = endpoint_info.perm if permission: - perms[permission] = getattr(self, + perms[permission] = getattr( + self, '_%s_description' % permission, - _('Access (%s) is limited to the following API users:') % permission) + _('Access (%s) is limited to the following API users:') % permission, + ) return [{'key': x[0], 'label': x[1]} for x in perms.items()] def get_availability_status(self): resource_type = ContentType.objects.get_for_model(self) current_status = ResourceStatus.objects.filter( - resource_type=resource_type, - resource_pk=self.pk).first() + resource_type=resource_type, resource_pk=self.pk + ).first() return current_status def down(self): status = self.get_availability_status() - return (status and status.down()) + return status and status.down() @endpoint(description=_('Check service availability'), display_order=-1) def up(self, request, **kwargs): @@ -335,28 +337,27 @@ class BaseResource(models.Model): def export_json(self): d = { '@type': 'passerelle-resource', - 'resource_type': '%s.%s' % (self.__class__._meta.app_label, - 
self.__class__._meta.model_name), + 'resource_type': '%s.%s' % (self.__class__._meta.app_label, self.__class__._meta.model_name), 'title': self.title, 'slug': self.slug, 'description': self.description, 'log_level': self.log_level, - 'access_rights': [] + 'access_rights': [], } resource_type = ContentType.objects.get_for_model(self) - for ar in AccessRight.objects.filter(resource_type=resource_type, - resource_pk=self.pk).select_related(): - d['access_rights'].append({ - 'codename': ar.codename, - 'apiuser': ar.apiuser.username, - }) - concrete_fields = [ - f for f in self.__class__._meta.get_fields() - if f.concrete and ( - not f.is_relation - or f.one_to_one - or (f.many_to_one and f.related_model) + for ar in AccessRight.objects.filter( + resource_type=resource_type, resource_pk=self.pk + ).select_related(): + d['access_rights'].append( + { + 'codename': ar.codename, + 'apiuser': ar.apiuser.username, + } ) + concrete_fields = [ + f + for f in self.__class__._meta.get_fields() + if f.concrete and (not f.is_relation or f.one_to_one or (f.many_to_one and f.related_model)) ] for field in concrete_fields: if field.name == 'id': @@ -375,8 +376,9 @@ class BaseResource(models.Model): elif isinstance(field, SFTPField): d[field.name] = value and value.__json__() else: - raise Exception('export_json: field %s of ressource class %s is unsupported' % ( - field, self.__class__)) + raise Exception( + 'export_json: field %s of ressource class %s is unsupported' % (field, self.__class__) + ) return d @staticmethod @@ -406,7 +408,8 @@ class BaseResource(models.Model): codename=ar['codename'], resource_type=resource_type, resource_pk=instance.pk, - apiuser=apiuser) + apiuser=apiuser, + ) return instance @classmethod @@ -423,12 +426,9 @@ class BaseResource(models.Model): else: instance = cls(**init_kwargs) concrete_fields = [ - f for f in cls._meta.get_fields() - if f.concrete and ( - not f.is_relation - or f.one_to_one - or (f.many_to_one and f.related_model) - ) + f + for f in 
cls._meta.get_fields() + if f.concrete and (not f.is_relation or f.one_to_one or (f.many_to_one and f.related_model)) ] for field in concrete_fields: if field.name == 'id': @@ -439,16 +439,16 @@ class BaseResource(models.Model): elif isinstance(field, models.FileField): if value: getattr(instance, field.attname).save( - value['name'], - ContentFile(base64.b64decode(value['content'])), - save=False) + value['name'], ContentFile(base64.b64decode(value['content'])), save=False + ) elif isinstance(field, SFTPField): if value: value = SFTP(**value) setattr(instance, field.attname, value) else: - raise Exception('import_json_real: field %s of ressource class ' - '%s is unsupported' % (field, cls)) + raise Exception( + 'import_json_real: field %s of ressource class ' '%s is unsupported' % (field, cls) + ) instance.save() if 'log_level' in d: instance.set_log_level(d['log_level']) @@ -457,15 +457,16 @@ class BaseResource(models.Model): def clean_logs(self): # clean logs timestamp = timezone.now() - datetime.timedelta( - days=self.logging_parameters.log_retention_days or settings.LOG_RETENTION_DAYS) + days=self.logging_parameters.log_retention_days or settings.LOG_RETENTION_DAYS + ) ResourceLog.objects.filter( - appname=self.get_connector_slug(), - slug=self.slug, - timestamp__lt=timestamp).delete() + appname=self.get_connector_slug(), slug=self.slug, timestamp__lt=timestamp + ).delete() def check_status(self): # should raise an exception if status is not ok raise NotImplementedError + check_status.not_implemented = True def availability(self): @@ -491,29 +492,34 @@ class BaseResource(models.Model): resource_type = ContentType.objects.get_for_model(self) current_status = ResourceStatus.objects.filter( - resource_type=resource_type, - resource_pk=self.pk).first() + resource_type=resource_type, resource_pk=self.pk + ).first() if not current_status or status != current_status.status: if status == 'down' and not self.down(): # new downtime if 
availability_parameters.has_zero_delay(): - self.logger.error(u'connector "%s" (%s) is now down: %s', self, self.__class__.__name__, message) + self.logger.error( + u'connector "%s" (%s) is now down: %s', self, self.__class__.__name__, message + ) else: - self.logger.warning(u'connector "%s" (%s) is now down: %s', self, self.__class__.__name__, message) + self.logger.warning( + u'connector "%s" (%s) is now down: %s', self, self.__class__.__name__, message + ) ResourceStatus( - resource_type=resource_type, - resource_pk=self.pk, - status=status, - message=message).save() + resource_type=resource_type, resource_pk=self.pk, status=status, message=message + ).save() if status == 'up' and current_status: self.logger.info(u'connector "%s" (%s) is back up', self, self.__class__.__name__) elif status == 'down': # check last_notification_downtime and current downtime to see if it matches a new notification delay - last_notification_timestamp = current_status.last_notification_timestamp or current_status.start_timestamp + last_notification_timestamp = ( + current_status.last_notification_timestamp or current_status.start_timestamp + ) current_time = now() downtime = (current_time - current_status.start_timestamp).total_seconds() // 60 last_notification_downtime = ( - last_notification_timestamp - current_status.start_timestamp).total_seconds() // 60 + last_notification_timestamp - current_status.start_timestamp + ).total_seconds() // 60 for delay in availability_parameters.notification_delays_generator(): if not delay: @@ -528,22 +534,24 @@ class BaseResource(models.Model): human_duration = 'for %d hours' % hours else: human_duration = 'for %d minutes' % downtime - self.logger.error(u'connector "%s" (%s) has been down %s: %s', - self, self.__class__.__name__, - human_duration, - message, - # when connector is down, logging is shutdown - force=True) + self.logger.error( + u'connector "%s" (%s) has been down %s: %s', + self, + self.__class__.__name__, + human_duration, + 
message, + # when connector is down, logging is shutdown + force=True, + ) ResourceStatus.objects.filter(pk=current_status.pk).update( - message=message, last_notification_timestamp=current_time) + message=message, last_notification_timestamp=current_time + ) break else: - ResourceStatus.objects.filter(pk=current_status.pk).update( - message=message) + ResourceStatus.objects.filter(pk=current_status.pk).update(message=message) break else: - ResourceStatus.objects.filter(pk=current_status.pk).update( - message=message) + ResourceStatus.objects.filter(pk=current_status.pk).update(message=message) def hourly(self): pass @@ -559,9 +567,7 @@ class BaseResource(models.Model): def jobs_set(self): resource_type = ContentType.objects.get_for_model(self) - return Job.objects.filter( - resource_type=resource_type, - resource_pk=self.pk) + return Job.objects.filter(resource_type=resource_type, resource_pk=self.pk) def jobs(self): # "jobs" cron job to run asynchronous tasks @@ -574,13 +580,17 @@ class BaseResource(models.Model): skipped_jobs = [] while True: with transaction.atomic(): - job = self.jobs_set().exclude( - pk__in=skipped_jobs - ).filter( + job = ( + self.jobs_set() + .exclude(pk__in=skipped_jobs) + .filter( Q(after_timestamp__isnull=True) | Q(after_timestamp__lt=timezone.now()), - status='registered' - ).select_for_update(**skip_locked - ).order_by('pk')[:1].first() + status='registered', + ) + .select_for_update(**skip_locked) + .order_by('pk')[:1] + .first() + ) if not job: break job.status = 'running' @@ -592,11 +602,13 @@ class BaseResource(models.Model): def add_job(self, method_name, natural_id=None, after_timestamp=None, **kwargs): resource_type = ContentType.objects.get_for_model(self) - job = Job(resource_type=resource_type, - resource_pk=self.pk, - method_name=method_name, - natural_id=natural_id, - parameters=kwargs) + job = Job( + resource_type=resource_type, + resource_pk=self.pk, + method_name=method_name, + natural_id=natural_id, + parameters=kwargs, 
+ ) job.set_after_timestamp(after_timestamp) job.save() transaction.on_commit(lambda: job.run(spool=True)) @@ -604,23 +616,21 @@ class BaseResource(models.Model): def handle_job_error(self, job, exc_info): from passerelle.utils.conversion import exception_to_text + (exc_type, exc_value, tb) = exc_info job.status = 'failed' job.done_timestamp = timezone.now() job.status_details = { 'error_summary': '\n'.join(traceback.format_exception_only(exc_type, exc_value)).strip(), } - self.logger.error('error running %s job (%s)', - job.method_name, - exception_to_text(exc_value), - exc_info=exc_info) + self.logger.error( + 'error running %s job (%s)', job.method_name, exception_to_text(exc_value), exc_info=exc_info + ) @property def has_open_access_right(self): return AccessRight.objects.filter( - resource_type=ContentType.objects.get_for_model(self), - resource_pk=self.pk, - apiuser__key='' + resource_type=ContentType.objects.get_for_model(self), resource_pk=self.pk, apiuser__key='' ).exists() @@ -633,15 +643,16 @@ class AccessRight(models.Model): apiuser = models.ForeignKey(ApiUser, verbose_name=_('API User'), on_delete=models.CASCADE) class Meta: - permissions = ( - ('see_accessright', 'Can see access right'), - ) - unique_together = ( - ('codename', 'resource_type', 'resource_pk', 'apiuser'), - ) + permissions = (('see_accessright', 'Can see access right'),) + unique_together = (('codename', 'resource_type', 'resource_pk', 'apiuser'),) def __str__(self): - return '%s (on %s <%s>) (for %s)' % (self.codename, self.resource_type, self.resource_pk, self.apiuser) + return '%s (on %s <%s>) (for %s)' % ( + self.codename, + self.resource_type, + self.resource_pk, + self.apiuser, + ) class LoggingParameters(models.Model): @@ -649,37 +660,34 @@ class LoggingParameters(models.Model): resource_pk = models.PositiveIntegerField() resource = fields.GenericForeignKey('resource_type', 'resource_pk') log_level = models.CharField( - verbose_name=_('Log level'), - max_length=10, - 
choices=LOGLEVEL_CHOICES, - default='INFO' + verbose_name=_('Log level'), max_length=10, choices=LOGLEVEL_CHOICES, default='INFO' ) trace_emails = models.TextField( verbose_name=_('Emails to receive error and critical traces'), help_text=_('One address per line (empty for site administrators)'), - blank=True + blank=True, ) requests_max_size = models.PositiveIntegerField( verbose_name=_('Requests maximum size'), help_text=_('Maximum HTTP request size to log'), blank=True, - null=True + null=True, ) responses_max_size = models.PositiveIntegerField( verbose_name=_('Responses maximum size'), help_text=_('Maximum HTTP reponse size to log'), blank=True, - null=True + null=True, ) log_retention_days = models.PositiveIntegerField( verbose_name=_('Log retention days'), help_text=_('Number of days to keep logs'), blank=True, - null=True + null=True, ) class Meta: - unique_together = (('resource_type', 'resource_pk')) + unique_together = ('resource_type', 'resource_pk') def parse_notification_delays(value): @@ -707,14 +715,17 @@ class AvailabilityParameters(models.Model): resource_pk = models.PositiveIntegerField() resource = fields.GenericForeignKey('resource_type', 'resource_pk') run_check = models.BooleanField( - default=True, verbose_name=_('Run regular availability checks'), - help_text=_('Run an availability check every 5 minutes')) + default=True, + verbose_name=_('Run regular availability checks'), + help_text=_('Run an availability check every 5 minutes'), + ) notification_delays = models.TextField( verbose_name=_('Notification delays'), default='0', blank=False, validators=[validate_notification_delays], - help_text=_('Increasing delay between error notifications in minutes, ex.: 0,5,10')) + help_text=_('Increasing delay between error notifications in minutes, ex.: 0,5,10'), + ) def has_zero_delay(self): return 0 in parse_notification_delays(self.notification_delays) @@ -724,12 +735,12 @@ class AvailabilityParameters(models.Model): last_notification_delay = 
notification_delays[-1] if last_notification_delay > 1: notification_delays = itertools.chain( - notification_delays, - itertools.count(2 * last_notification_delay, last_notification_delay)) + notification_delays, itertools.count(2 * last_notification_delay, last_notification_delay) + ) return notification_delays class Meta: - unique_together = (('resource_type', 'resource_pk')) + unique_together = ('resource_type', 'resource_pk') class SkipJob(Exception): @@ -750,15 +761,16 @@ class Job(models.Model): done_timestamp = models.DateTimeField(null=True) after_timestamp = models.DateTimeField(null=True) status = models.CharField( - max_length=20, - default='registered', - choices=(('registered', _('Registered')), - ('running', _('Running')), - ('failed', _('Failed')), - ('restarted', _('Failed and restarted')), - ('completed', _('Completed')) - ), - ) + max_length=20, + default='registered', + choices=( + ('registered', _('Registered')), + ('running', _('Running')), + ('failed', _('Failed')), + ('restarted', _('Failed and restarted')), + ('completed', _('Completed')), + ), + ) status_details = JSONField(default={}) class Meta: @@ -794,6 +806,7 @@ class Job(models.Model): if spool and self.pk: if 'uwsgi' in sys.modules and settings.PASSERELLE_MANAGE_COMMAND: from passerelle.utils.spooler import run_job + tenant = getattr(connection, 'tenant', None) run_job.spool(job_id=str(self.pk), domain=getattr(tenant, 'domain_url', None)) return @@ -828,9 +841,7 @@ class ResourceLog(models.Model): class Meta: ordering = ('id',) - permissions = ( - ('see_resourcelog', 'Can see resource logs'), - ) + permissions = (('see_resourcelog', 'Can see resource logs'),) @property def level(self): @@ -854,6 +865,7 @@ STATUS_CHOICES = ( ('down', _('Down')), ) + class ResourceStatus(models.Model): resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) resource_pk = models.PositiveIntegerField() @@ -916,8 +928,7 @@ class ProxyLogger(object): rl_message = message rl_args = args # 
Borrowed from python stdlib logging/__init__.py - if (rl_args and len(rl_args) == 1 and isinstance(rl_args[0], collections.Mapping) - and rl_args[0]): + if rl_args and len(rl_args) == 1 and isinstance(rl_args[0], collections.Mapping) and rl_args[0]: rl_args = rl_args[0] # End Borrow if rl_args: @@ -933,6 +944,7 @@ class ProxyLogger(object): def is_json_serializable(value): return isinstance(value, (list, dict, bool) + six.integer_types + six.string_types) + attr['extra'] = {key: value for key, value in extra.items() if is_json_serializable(value)} if self.transaction_id: @@ -985,32 +997,21 @@ class ProxyLogger(object): class HTTPResource(models.Model): - '''Mixin to add basic TLS/Basic HTTP authentication fields to any - resource.''' + """Mixin to add basic TLS/Basic HTTP authentication fields to any + resource.""" + basic_auth_username = models.CharField( - max_length=128, - verbose_name=_('Basic authentication username'), - blank=True) + max_length=128, verbose_name=_('Basic authentication username'), blank=True + ) basic_auth_password = models.CharField( - max_length=128, - verbose_name=_('Basic authentication password'), - blank=True) - client_certificate = models.FileField( - verbose_name=_('TLS client certificate'), - null=True, - blank=True) + max_length=128, verbose_name=_('Basic authentication password'), blank=True + ) + client_certificate = models.FileField(verbose_name=_('TLS client certificate'), null=True, blank=True) trusted_certificate_authorities = models.FileField( - verbose_name=_('TLS trusted CAs'), - null=True, - blank=True) - verify_cert = models.BooleanField( - verbose_name=_('TLS verify certificates'), - default=True, - blank=True) - http_proxy = models.CharField( - max_length=128, - verbose_name=_('HTTP and HTTPS proxy'), - blank=True) + verbose_name=_('TLS trusted CAs'), null=True, blank=True + ) + verify_cert = models.BooleanField(verbose_name=_('TLS verify certificates'), default=True, blank=True) + http_proxy = 
models.CharField(max_length=128, verbose_name=_('HTTP and HTTPS proxy'), blank=True) class Meta: abstract = True @@ -1018,21 +1019,15 @@ class HTTPResource(models.Model): @six.python_2_unicode_compatible class BaseQuery(models.Model): - '''Base for building custom queries. + """Base for building custom queries. It must define "resource" attribute as a ForeignKey to a BaseResource subclass, and probably extend its "as_endpoint" method to document its parameters. - ''' + """ - name = models.CharField( - verbose_name=_('Name'), - max_length=128) - slug = models.SlugField( - verbose_name=_('Slug'), - max_length=128) - description = models.TextField( - verbose_name=_('Description'), - blank=True) + name = models.CharField(verbose_name=_('Name'), max_length=128) + slug = models.SlugField(verbose_name=_('Slug'), max_length=128) + description = models.TextField(verbose_name=_('Description'), blank=True) http_method = 'get' @@ -1058,12 +1053,11 @@ class BaseQuery(models.Model): def export_json(self): d = {} fields = [ - f for f in self.__class__._meta.get_fields() - if f.concrete and ( - not f.is_relation - or f.one_to_one - or (f.many_to_one and f.related_model) - ) and f.name not in ['id', 'resource'] + f + for f in self.__class__._meta.get_fields() + if f.concrete + and (not f.is_relation or f.one_to_one or (f.many_to_one and f.related_model)) + and f.name not in ['id', 'resource'] ] for field in fields: d[field.name] = getattr(self, field.name) @@ -1074,9 +1068,7 @@ class BaseQuery(models.Model): return cls(**d) def delete_url(self): - return reverse(self.delete_view, - kwargs={'slug': self.resource.slug, 'pk': self.pk}) + return reverse(self.delete_view, kwargs={'slug': self.resource.slug, 'pk': self.pk}) def edit_url(self): - return reverse(self.edit_view, - kwargs={'slug': self.resource.slug, 'pk': self.pk}) + return reverse(self.edit_view, kwargs={'slug': self.resource.slug, 'pk': self.pk}) diff --git a/passerelle/base/signature.py b/passerelle/base/signature.py 
index d879623e..30b936f3 100644 --- a/passerelle/base/signature.py +++ b/passerelle/base/signature.py @@ -29,9 +29,7 @@ def sign_query(query, key, algo='sha256', timestamp=None, nonce=None): new_query = query if new_query: new_query += '&' - new_query += urlencode(( - ('algo', algo), - ('timestamp', timestamp))) + new_query += urlencode((('algo', algo), ('timestamp', timestamp))) if nonce: # we don't add nonce if it's an empty string new_query += '&nonce=' + quote(nonce) signature = base64.b64encode(sign_string(new_query, key, algo=algo)) @@ -54,8 +52,7 @@ def check_url(url, key, known_nonce=None, timedelta=30): def check_query(query, key, known_nonce=None, timedelta=30): parsed = urlparse.parse_qs(query) - if not ('signature' in parsed and 'algo' in parsed and - 'timestamp' in parsed): + if not ('signature' in parsed and 'algo' in parsed and 'timestamp' in parsed): return False if known_nonce is not None: if ('nonce' not in parsed) or known_nonce(parsed['nonce'][0]): diff --git a/passerelle/base/templatetags/passerelle.py b/passerelle/base/templatetags/passerelle.py index c8094d66..38207f53 100644 --- a/passerelle/base/templatetags/passerelle.py +++ b/passerelle/base/templatetags/passerelle.py @@ -38,7 +38,9 @@ register = template.Library() @register.inclusion_tag('passerelle/includes/access-rights-table.html', takes_context=True) def access_rights_table(context, resource, permission): resource_type = ContentType.objects.get_for_model(resource) - rights = AccessRight.objects.filter(resource_type=resource_type, resource_pk=resource.id, codename=permission) + rights = AccessRight.objects.filter( + resource_type=resource_type, resource_pk=resource.id, codename=permission + ) context['permission'] = permission context['access_rights_list'] = rights context['resource_type'] = resource_type.id @@ -156,8 +158,7 @@ def render_json_schema(schema): max_length = schema.pop('maxLength', '') pattern = schema.pop('pattern', '') if enum: - enum = mark_safe(' | '.join( - 
[format_html('{}', json.dumps(el)) for el in enum])) + enum = mark_safe(' | '.join([format_html('{}', json.dumps(el)) for el in enum])) s = 'string' if max_length or min_length: s += format_html('[{0}:{1}]', min_length, max_length) @@ -192,7 +193,9 @@ def render_json_schema(schema): if merge_extra: s += format_html(', {}', _('merge extra')) if not additional_properties: - s += format_html(', {}', _('no additional properties')) + s += format_html( + ', {}', _('no additional properties') + ) if title: s += format_html(', {}', title) if schema: @@ -250,4 +253,3 @@ def render_body_schemas(body_schemas): s += format_html('
  • {0}
  • ', key) s += mark_safe('
      ') return mark_safe(s) - diff --git a/passerelle/base/urls.py b/passerelle/base/urls.py index 6664e3b8..0e1cd429 100644 --- a/passerelle/base/urls.py +++ b/passerelle/base/urls.py @@ -1,25 +1,39 @@ from django.conf.urls import url -from .views import ApiUserCreateView, ApiUserUpdateView, ApiUserDeleteView, \ - ApiUserListView, AccessRightDeleteView, AccessRightCreateView, \ - LoggingParametersUpdateView, ManageAvailabilityView, ImportSiteView, \ - ExportSiteView +from .views import ( + ApiUserCreateView, + ApiUserUpdateView, + ApiUserDeleteView, + ApiUserListView, + AccessRightDeleteView, + AccessRightCreateView, + LoggingParametersUpdateView, + ManageAvailabilityView, + ImportSiteView, + ExportSiteView, +) access_urlpatterns = [ url(r'^$', ApiUserListView.as_view(), name='apiuser-list'), url(r'^add$', ApiUserCreateView.as_view(), name='apiuser-add'), url(r'^(?P[\w,-]+)/edit$', ApiUserUpdateView.as_view(), name='apiuser-edit'), url(r'^(?P[\w,-]+)/delete$', ApiUserDeleteView.as_view(), name='apiuser-delete'), - - url(r'^(?P[\w,-]+)/remove$', AccessRightDeleteView.as_view(), - name='access-right-remove'), - url(r'^accessright/add/(?P[\w,-]+)/(?P[\w,-]+)/(?P[\w,-]+)/', - AccessRightCreateView.as_view(), name='access-right-add'), - url(r'logging/parameters/(?P[\w,-]+)/(?P[\w,-]+)/$', - LoggingParametersUpdateView.as_view(), name='logging-parameters'), - url(r'manage/availability/(?P[\w,-]+)/(?P[\w,-]+)/$', - ManageAvailabilityView.as_view(), name='manage-availability') - + url(r'^(?P[\w,-]+)/remove$', AccessRightDeleteView.as_view(), name='access-right-remove'), + url( + r'^accessright/add/(?P[\w,-]+)/(?P[\w,-]+)/(?P[\w,-]+)/', + AccessRightCreateView.as_view(), + name='access-right-add', + ), + url( + r'logging/parameters/(?P[\w,-]+)/(?P[\w,-]+)/$', + LoggingParametersUpdateView.as_view(), + name='logging-parameters', + ), + url( + r'manage/availability/(?P[\w,-]+)/(?P[\w,-]+)/$', + ManageAvailabilityView.as_view(), + name='manage-availability', + ), ] 
import_export_urlpatterns = [ diff --git a/passerelle/base/views.py b/passerelle/base/views.py index 04f40dd5..3eee2a57 100644 --- a/passerelle/base/views.py +++ b/passerelle/base/views.py @@ -25,8 +25,7 @@ from django.urls import reverse from django.core.exceptions import ObjectDoesNotExist, PermissionDenied from django.db.models import Q from django.forms import models as model_forms -from django.views.generic import ( - View, DetailView, ListView, CreateView, UpdateView, DeleteView, FormView) +from django.views.generic import View, DetailView, ListView, CreateView, UpdateView, DeleteView, FormView from django.http import Http404, HttpResponse, HttpResponseRedirect from django.shortcuts import get_object_or_404 from django.utils.timezone import make_aware @@ -56,10 +55,9 @@ class ResourceView(DetailView): context = super(ResourceView, self).get_context_data(**kwargs) context['site_base_uri'] = '%s://%s' % ( 'https' if self.request.is_secure() else 'http', - self.request.get_host()) - context['absolute_uri'] = '%s%s' % ( - context['site_base_uri'], - self.request.path) + self.request.get_host(), + ) + context['absolute_uri'] = '%s%s' % (context['site_base_uri'], self.request.path) return context @@ -147,8 +145,14 @@ class LoggingParametersUpdateView(FormView): def get_form_class(self): form_class = model_forms.modelform_factory( LoggingParameters, - fields=['log_level', 'trace_emails', 'requests_max_size', 'responses_max_size', - 'log_retention_days']) + fields=[ + 'log_level', + 'trace_emails', + 'requests_max_size', + 'responses_max_size', + 'log_retention_days', + ], + ) form_class.base_fields['trace_emails'].widget.attrs['rows'] = '3' return form_class @@ -211,10 +215,8 @@ class ManageAvailabilityView(UpdateView): if not form.instance.run_check and resource.down(): resource_type = ContentType.objects.get_for_model(resource) ResourceStatus( - resource_type=resource_type, - resource_pk=self.kwargs['resource_pk'], - status='up', - message='').save() + 
resource_type=resource_type, resource_pk=self.kwargs['resource_pk'], status='up', message='' + ).save() # log changes to notification delays if 'notification_delays' in form.changed_data: @@ -222,7 +224,9 @@ class ManageAvailabilityView(UpdateView): # log changes to run_check, if enabled immediately check for availability if 'run_check' in form.changed_data: - resource.logger.info(u'availability checks %s', 'enabled' if form.instance.run_check else 'disabled') + resource.logger.info( + u'availability checks %s', 'enabled' if form.instance.run_check else 'disabled' + ) if form.instance.run_check: resource.availability() @@ -242,9 +246,7 @@ class GenericViewJobsConnectorView(GenericConnectorMixin, ListView): try: resource_type = ContentType.objects.get_for_model(connector) context['job_target'] = Job.objects.get( - resource_type=resource_type, - resource_pk=connector.pk, - pk=self.request.GET['job_id'] + resource_type=resource_type, resource_pk=connector.pk, pk=self.request.GET['job_id'] ) except (ValueError, Job.DoesNotExist): pass @@ -267,12 +269,17 @@ class GenericViewJobsConnectorView(GenericConnectorMixin, ListView): if date.hour == 0 and date.minute == 0 and date.second == 0: # just a date: display all jobs for that date max_date = date + datetime.timedelta(days=1) - qs = qs.filter(Q(creation_timestamp__gte=date, - creation_timestamp__lte=date + datetime.timedelta(days=1)) | - Q(update_timestamp__gte=date, - update_timestamp__lte=date + datetime.timedelta(days=1)) | - Q(done_timestamp__gte=date, - done_timestamp__lte=date + datetime.timedelta(days=1))) + qs = qs.filter( + Q( + creation_timestamp__gte=date, + creation_timestamp__lte=date + datetime.timedelta(days=1), + ) + | Q( + update_timestamp__gte=date, + update_timestamp__lte=date + datetime.timedelta(days=1), + ) + | Q(done_timestamp__gte=date, done_timestamp__lte=date + datetime.timedelta(days=1)) + ) elif date.second == 0: # without seconds: display all jobs in this minute max_date = date + 
datetime.timedelta(seconds=60) @@ -280,12 +287,11 @@ class GenericViewJobsConnectorView(GenericConnectorMixin, ListView): # display all jobs in the same second max_date = date + datetime.timedelta(seconds=1) - qs = qs.filter(Q(creation_timestamp__gte=date, - creation_timestamp__lte=max_date) | - Q(update_timestamp__gte=date, - update_timestamp__lte=max_date) | - Q(update_timestamp__gte=date, - update_timestamp__lte=max_date)) + qs = qs.filter( + Q(creation_timestamp__gte=date, creation_timestamp__lte=max_date) + | Q(update_timestamp__gte=date, update_timestamp__lte=max_date) + | Q(update_timestamp__gte=date, update_timestamp__lte=max_date) + ) return qs @@ -306,10 +312,17 @@ class GenericRestartJobView(GenericConnectorMixin, View): def get(self, request, *args, **kwargs): connector = get_object_or_404(self.model, slug=kwargs['slug']) resource_type = ContentType.objects.get_for_model(connector) - job = get_object_or_404(Job, pk=self.kwargs['job_pk'], resource_type=resource_type, resource_pk=connector.pk, status='failed') + job = get_object_or_404( + Job, + pk=self.kwargs['job_pk'], + resource_type=resource_type, + resource_pk=connector.pk, + status='failed', + ) job.restart() return HttpResponseRedirect( - reverse('view-jobs-connector', kwargs={'connector': kwargs['connector'], 'slug': kwargs['slug']})) + reverse('view-jobs-connector', kwargs={'connector': kwargs['connector'], 'slug': kwargs['slug']}) + ) class ImportSiteView(FormView): @@ -326,13 +339,11 @@ class ImportSiteView(FormView): form.add_error('site_json', _('File is not in the expected JSON format.')) return self.form_invalid(form) - results = import_site(site_json, overwrite=True, - import_users=form.cleaned_data['import_users']) + results = import_site(site_json, overwrite=True, import_users=form.cleaned_data['import_users']) return super(ImportSiteView, self).form_valid(form) class ExportSiteView(View): - def get(self, request, *args, **kwargs): response = HttpResponse(content_type='application/json') 
today = datetime.date.today() diff --git a/passerelle/contrib/adict/migrations/0001_initial.py b/passerelle/contrib/adict/migrations/0001_initial.py index e02983ff..7247bc8a 100644 --- a/passerelle/contrib/adict/migrations/0001_initial.py +++ b/passerelle/contrib/adict/migrations/0001_initial.py @@ -14,15 +14,38 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Adict', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('service_root_url', models.URLField(max_length=256, verbose_name='Service Root URL')), ('sector_type', models.CharField(max_length=256, verbose_name='Sector Type')), ('api_token', models.CharField(max_length=256, verbose_name='API Token')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_adict_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_adict_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'ADICT (Strasbourg GIS)', diff --git a/passerelle/contrib/adict/models.py b/passerelle/contrib/adict/models.py index 
93885b71..6f454592 100644 --- a/passerelle/contrib/adict/models.py +++ b/passerelle/contrib/adict/models.py @@ -30,24 +30,28 @@ class Adict(BaseResource): class Meta: verbose_name = _('ADICT (Strasbourg GIS)') - @endpoint(perm='can_access', - description=_('Get feature info'), - parameters={ - 'lat': {'description': _('Latitude'), 'example_value': '48.5704728777251'}, - 'lon': {'description': _('Longitude'), 'example_value': '7.75659804140393'}, - }) + @endpoint( + perm='can_access', + description=_('Get feature info'), + parameters={ + 'lat': {'description': _('Latitude'), 'example_value': '48.5704728777251'}, + 'lon': {'description': _('Longitude'), 'example_value': '7.75659804140393'}, + }, + ) def feature_info(self, request, lat, lon): params = query_args = {'x': lon, 'y': lat, 'srid': '4326'} query_args['token'] = self.api_token query_args['sector_type'] = self.sector_type - response = self.requests.get(self.service_root_url.strip('/') + '/api/v1.0/secteurs', - params={ - 'x': lon, - 'y': lat, - 'srid': '4326', - 'token': self.api_token, - 'sector_type': self.sector_type, - }) + response = self.requests.get( + self.service_root_url.strip('/') + '/api/v1.0/secteurs', + params={ + 'x': lon, + 'y': lat, + 'srid': '4326', + 'token': self.api_token, + 'sector_type': self.sector_type, + }, + ) response.raise_for_status() data = response.json() if len(data.get('features') or []) == 0: diff --git a/passerelle/contrib/dpark/migrations/0001_initial.py b/passerelle/contrib/dpark/migrations/0001_initial.py index c836e9ee..43f76275 100644 --- a/passerelle/contrib/dpark/migrations/0001_initial.py +++ b/passerelle/contrib/dpark/migrations/0001_initial.py @@ -14,14 +14,51 @@ class Migration(migrations.Migration): migrations.CreateModel( name='DPark', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', 
models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True)), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('wsdl_url', models.URLField(help_text='URL of the SOAP wsdl endpoint', max_length=512, verbose_name='SOAP wsdl endpoint')), - ('operation_url', models.URLField(help_text='URL of the SOAP operation endpoint', max_length=512, verbose_name='SOAP operation endpoint')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_dpark_users_+', related_query_name='+', blank=True)), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'wsdl_url', + models.URLField( + help_text='URL of the SOAP wsdl endpoint', + max_length=512, + verbose_name='SOAP wsdl endpoint', + ), + ), + ( + 'operation_url', + models.URLField( + help_text='URL of the SOAP operation endpoint', + max_length=512, + verbose_name='SOAP operation endpoint', + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_dpark_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'D-Park connector', @@ -30,7 +67,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Pairing', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('nameid', models.CharField(max_length=256)), ('lastname', models.CharField(max_length=128)), 
('firstnames', models.CharField(max_length=128)), diff --git a/passerelle/contrib/dpark/models.py b/passerelle/contrib/dpark/models.py index a6c735b4..73be9cf1 100644 --- a/passerelle/contrib/dpark/models.py +++ b/passerelle/contrib/dpark/models.py @@ -146,11 +146,13 @@ def date_or_datetime_to_local_date(value): def normalize_reply(reply): excluded = ('CodeRetour', 'MessageRetour') serialized_reply = serialize_object(reply) - data = { - key.lower(): value for key, value in serialized_reply.items() if key not in excluded} + data = {key.lower(): value for key, value in serialized_reply.items() if key not in excluded} if data.get('demande_numerodossier'): data['id'] = str(data['demande_numerodossier']) - text = '%(demande_numerodossier)s - %(demandeur_nomusuel)s %(demandeur_prenom)s - %(demande_immatvehicule1)s' % data + text = ( + '%(demande_numerodossier)s - %(demandeur_nomusuel)s %(demandeur_prenom)s - %(demande_immatvehicule1)s' + % data + ) if data.get('demande_immatvehicule2'): text += '/%s' % data['demande_immatvehicule2'] data['text'] = text @@ -193,7 +195,8 @@ def get_address_params(data): 'Adresse_BoitePostaleLieudit': data.get('address_place', ''), 'Adresse_CodePostal': data['address_zipcode'], 'Adresse_Localite': data['address_locality'], - 'Adresse_Quartier': data.get('address_district', '')} + 'Adresse_Quartier': data.get('address_district', ''), + } def get_client(instance): @@ -207,13 +210,17 @@ class DPark(BaseResource): category = _('Business Process Connectors') wsdl_url = models.URLField( - max_length=512, blank=False, + max_length=512, + blank=False, verbose_name=_('SOAP wsdl endpoint'), - help_text=_('URL of the SOAP wsdl endpoint')) + help_text=_('URL of the SOAP wsdl endpoint'), + ) operation_url = models.URLField( - max_length=512, blank=False, + max_length=512, + blank=False, verbose_name=_('SOAP operation endpoint'), - help_text=_('URL of the SOAP operation endpoint')) + help_text=_('URL of the SOAP operation endpoint'), + ) class Meta: 
verbose_name = _('D-Park connector') @@ -227,7 +234,7 @@ class DPark(BaseResource): bypass_erroneous_reply = kwargs.pop('bypass_erroneous_reply', False) try: reply = getattr(proxy_service, operation)(*args, **kwargs) - except (WebFault, ) as exc: + except (WebFault,) as exc: raise APIError('ServiceError: %s' % exc) except (Exception,) as exc: raise APIError('Error: %s' % exc) @@ -258,9 +265,9 @@ class DPark(BaseResource): @endpoint(perm='can_access', description=_('Check service availibity')) def ping(self, request, *args, **kwargs): - '''Checks service availibility by trying to find + """Checks service availibility by trying to find a plate number - ''' + """ self.call('FPS_Rech_Immat', 'AA-000-BB', timezone.now().isoformat()) return {'data': True} @@ -287,10 +294,18 @@ class DPark(BaseResource): def register(self, request, *args, **kwargs): data = json_loads(request.body) is_erroneous( - data, ( - 'application_id', 'applicant_title', 'applicant_lastname', 'applicant_firstnames', - 'applicant_email', 'address_sticode', 'address_zipcode', 'address_locality', 'address_district' - ) + data, + ( + 'application_id', + 'applicant_title', + 'applicant_lastname', + 'applicant_firstnames', + 'applicant_email', + 'address_sticode', + 'address_zipcode', + 'address_locality', + 'address_district', + ), ) application_id = data['application_id'] applicant = { @@ -299,7 +314,7 @@ class DPark(BaseResource): 'Demandeur_Prenom': data['applicant_firstnames'], 'Demandeur_TelephoneFixe': data.get('applicant_phone', ''), 'Demandeur_TelephonePortable': data.get('applicant_mobilephone', ''), - 'Demandeur_Email': data['applicant_email'] + 'Demandeur_Email': data['applicant_email'], } address = get_address_params(data) filenumber = data.get('filenumber', '') @@ -325,7 +340,7 @@ class DPark(BaseResource): 'Demande_CodePostalBanque': data.get('application_bank_zipcode', ''), 'Demande_VilleBanque': data.get('application_bank_city', ''), 'Demande_IBAN': data.get('application_bank_iban', ''), - 
'Demande_BIC': data.get('application_bank_bic', '') + 'Demande_BIC': data.get('application_bank_bic', ''), } reply = self.call('PLS_ENREG', application_id, applicant, address, application) if filenumber: @@ -342,9 +357,14 @@ class DPark(BaseResource): if not result: return {'err': 1, 'code': result.code, 'msg': result.msg} Pairing.objects.get_or_create( - resource=self, nameid=data['nameid'], lastname=data['lastname'], - firstnames=data['firstnames'], filenumber=data['filenumber'], - badgenumber=data.get('badgenumber', 0), cardnumber=data.get('cardnumber', 0)) + resource=self, + nameid=data['nameid'], + lastname=data['lastname'], + firstnames=data['firstnames'], + filenumber=data['filenumber'], + badgenumber=data.get('badgenumber', 0), + cardnumber=data.get('cardnumber', 0), + ) return {} @endpoint(perm='can_access', methods=['post'], description=_('Unlink user to subscription')) @@ -371,7 +391,9 @@ class DPark(BaseResource): 'desc': reply.MessageRetour, } - @endpoint(name='check-renewal-time', perm='can_access', description=_('Check if renewal time has not expired')) + @endpoint( + name='check-renewal-time', perm='can_access', description=_('Check if renewal time has not expired') + ) def check_renewal_time(self, request, *args, **kwargs): data = request.GET is_erroneous(data, ('firstnames', 'lastname', 'filenumber')) @@ -379,9 +401,11 @@ class DPark(BaseResource): reply = self.call('PLS_CTRLDELAIS', *params, bypass_erroneous_reply=True) return {'data': reply.CodeRetour == '01', 'desc': reply.MessageRetour} - @endpoint(name='check-renewal-duplicate', - perm='can_access', - description=_('Check if renewal request is not a duplicate')) + @endpoint( + name='check-renewal-duplicate', + perm='can_access', + description=_('Check if renewal request is not a duplicate'), + ) def check_renewal_duplicate(self, request, *args, **kwargs): data = request.GET is_erroneous(data, ('firstnames', 'lastname', 'filenumber')) @@ -389,33 +413,58 @@ class DPark(BaseResource): reply = 
self.call('PLS_CTRLDOUBLRENOUV', *params, bypass_erroneous_reply=True) return {'data': reply.CodeRetour == '01', 'desc': reply.MessageRetour} - @endpoint(name='check-creation-duplicate', - perm='can_access', - description=_('Check if creation request is not a duplicate')) + @endpoint( + name='check-creation-duplicate', + perm='can_access', + description=_('Check if creation request is not a duplicate'), + ) def check_creation_duplicate(self, request, *args, **kwargs): data = request.GET - is_erroneous(data, ('applicant_firstnames', 'applicant_lastname', 'address_sticode', - 'address_zipcode', 'address_locality')) + is_erroneous( + data, + ( + 'applicant_firstnames', + 'applicant_lastname', + 'address_sticode', + 'address_zipcode', + 'address_locality', + ), + ) lastname, firstnames = data['applicant_lastname'], data['applicant_firstnames'] address = get_address_params(data) reply = self.call('PLS_CTRLDOUBLCREA', lastname, firstnames, address, bypass_erroneous_reply=True) return {'data': reply.CodeRetour == '01', 'desc': reply.MessageRetour} - @endpoint(name='check-creation-not-renewal', - perm='can_access', - description=_('Check if creation request is not a renewal request')) + @endpoint( + name='check-creation-not-renewal', + perm='can_access', + description=_('Check if creation request is not a renewal request'), + ) def check_creation_is_not_renewal(self, request, *args, **kwargs): data = request.GET - is_erroneous(data, ('applicant_firstnames', 'applicant_lastname', 'address_sticode', - 'address_zipcode', 'address_locality')) + is_erroneous( + data, + ( + 'applicant_firstnames', + 'applicant_lastname', + 'address_sticode', + 'address_zipcode', + 'address_locality', + ), + ) lastname, firstnames = data['applicant_lastname'], data['applicant_firstnames'] address = get_address_params(data) - reply = self.call('PLS_CTRLUSAGERCONNUCREA', lastname, firstnames, address, bypass_erroneous_reply=True) + reply = self.call( + 'PLS_CTRLUSAGERCONNUCREA', lastname, 
firstnames, address, bypass_erroneous_reply=True + ) return {'data': reply.CodeRetour == '01', 'desc': reply.MessageRetour} - @endpoint(name='payment-info', perm='can_access', - pattern=r'^(?P\w+)/$', - description=_('Get payment information')) + @endpoint( + name='payment-info', + perm='can_access', + pattern=r'^(?P\w+)/$', + description=_('Get payment information'), + ) def payment_info(self, request, nameid, *args, **kwargs): pairings = Pairing.objects.filter(resource=self, nameid=nameid) if not pairings: @@ -432,18 +481,21 @@ class DPark(BaseResource): data = json_loads(request.body) is_erroneous( data, - ('nameid', 'filenumber', - 'transaction_id', 'application_id', - 'transaction_datetime', 'total_amount', - 'application_external_id') + ( + 'nameid', + 'filenumber', + 'transaction_id', + 'application_id', + 'transaction_datetime', + 'total_amount', + 'application_external_id', + ), ) # We accept a simple date or a datetime using UTC, we convert it to Europe/Paris timezone on exit transaction_date = date_or_datetime_to_local_date(data['transaction_datetime']) if transaction_date is None: raise APIError(_('Invalid value for transaction datetime')) - pairings = Pairing.objects.filter(resource=self, - nameid=data['nameid'], - filenumber=data['filenumber']) + pairings = Pairing.objects.filter(resource=self, nameid=data['nameid'], filenumber=data['filenumber']) total_amount = int(data['total_amount']) * 100 # in cents if not pairings: raise APIError(_('No pairing exists')) @@ -455,12 +507,15 @@ class DPark(BaseResource): data.get('applicaiton_payment_type', 10), total_amount, transaction_date.strftime('%Y%m%d'), - data['transaction_id']) + data['transaction_id'], + ) for pairing in pairings: pairing.clear_cache() return {'data': True} - @endpoint(name='send-files', perm='can_access', methods=['post'], description=_('Send supporting documents')) + @endpoint( + name='send-files', perm='can_access', methods=['post'], description=_('Send supporting documents') + ) def 
send_files(self, request, *args, **kwargs): try: data = json_loads(request.body) @@ -503,8 +558,7 @@ class DPark(BaseResource): filename = value['filename'] if not filename.lower().endswith('.pdf'): filename += '.pdf' - attached_files.append({ - 'TypeDocument': doc_id, 'NomFichier': filename, 'Fichier': pdf_content}) + attached_files.append({'TypeDocument': doc_id, 'NomFichier': filename, 'Fichier': pdf_content}) # deduce the number of files if errors: raise APIError(errors) @@ -547,7 +601,7 @@ class Pairing(models.Model): 'lastname': self.lastname, 'filenumber': self.filenumber, 'badgenumber': self.badgenumber, - 'cardnumber': self.cardnumber + 'cardnumber': self.cardnumber, } params = make_subscriber_params(data) try: @@ -558,20 +612,14 @@ class Pairing(models.Model): cache.set(self.info_cache_key, info, self.INFO_CACHE_DURATION) return info - PAYMENT_TYPES = { - 5: 'Prélèvement mensualisé', - 10: 'Carte Bancaire via Internet' - } + PAYMENT_TYPES = {5: 'Prélèvement mensualisé', 10: 'Carte Bancaire via Internet'} def get_payment_info(self): payment = cache.get(self.payment_info_cache_key) if payment: return payment try: - reply = self.resource.call('PLS_RECUPAIEM', - self.filenumber, - self.lastname, - self.firstnames) + reply = self.resource.call('PLS_RECUPAIEM', self.filenumber, self.lastname, self.firstnames) except APIError: return None payment = normalize_reply(reply) diff --git a/passerelle/contrib/fake_family/default_database.py b/passerelle/contrib/fake_family/default_database.py index 70b58ff2..a49d081e 100644 --- a/passerelle/contrib/fake_family/default_database.py +++ b/passerelle/contrib/fake_family/default_database.py @@ -29,18 +29,18 @@ def default_database(): invoices = {} for i in range(10): - day = now + timedelta(15*(i-6)-1) + day = now + timedelta(15 * (i - 6) - 1) limit = day + timedelta(30) total_amount = '%.2f' % (random.randint(1000, 5000) / 100.0) if i > 5: amount = total_amount else: - amount = "0.00" # paid - invoices["F%d%0.2d-%d" % 
(day.year, day.month, random.randint(100,999))] = { + amount = "0.00" # paid + invoices["F%d%0.2d-%d" % (day.year, day.month, random.randint(100, 999))] = { "amount": amount, "label": "facture du %d/%d/%d" % (day.day, day.month, day.year), "total_amount": total_amount, - "online_payment": i%3 != 0, + "online_payment": i % 3 != 0, "created": day.strftime('%Y-%m-%d'), "pay_limit_date": limit.strftime('%Y-%m-%d'), "has_pdf": True, @@ -55,7 +55,7 @@ def default_database(): adult['password'] = 'pass%d' % i adult['email'] = 'p%d@example.net' % i adult['text'] = '%(first_name)s %(last_name)s' % adult - adult['birthdate'] = "%d-%0.2d-18" % (now.year-random.randint(20,40), random.randint(1,12)) + adult['birthdate'] = "%d-%0.2d-18" % (now.year - random.randint(20, 40), random.randint(1, 12)) adult['phone'] = '0122334455' adult['cellphone'] = '0655443322' adult['invoices'] = [] @@ -66,19 +66,19 @@ def default_database(): adult2.update(randomnames.person('adult')) adult2['last_name'] = adult['last_name'] adult2['text'] = '%(first_name)s %(last_name)s' % adult2 - adult2['birthdate'] = "%d-%0.2d-18" % (now.year-random.randint(20,40), random.randint(1,12)) - adult2['login'] = 'p%d@example.net' % (i+1) - adult2['password'] = 'pass%d' % (i+1) - adult2['email'] = 'p%d@example.net' % (i+1) - adult2['id'] = i+1 - adults['%d' % (i+1)] = adult2 + adult2['birthdate'] = "%d-%0.2d-18" % (now.year - random.randint(20, 40), random.randint(1, 12)) + adult2['login'] = 'p%d@example.net' % (i + 1) + adult2['password'] = 'pass%d' % (i + 1) + adult2['email'] = 'p%d@example.net' % (i + 1) + adult2['id'] = i + 1 + adults['%d' % (i + 1)] = adult2 children = {} - for i in range(1,51): + for i in range(1, 51): child = randomnames.person('child') child['text'] = '%(first_name)s %(last_name)s' % child - child['birthdate'] = "%d-%0.2d-18" % (now.year-random.randint(1,14), random.randint(1,12)) - child['keywords'] = ["naissance-en-"+child['birthdate'][:4]] + child['birthdate'] = "%d-%0.2d-18" % (now.year - 
random.randint(1, 14), random.randint(1, 12)) + child['keywords'] = ["naissance-en-" + child['birthdate'][:4]] child['id'] = i children['%d' % i] = child diff --git a/passerelle/contrib/fake_family/migrations/0001_initial.py b/passerelle/contrib/fake_family/migrations/0001_initial.py index a3feeff9..54c9c8a8 100644 --- a/passerelle/contrib/fake_family/migrations/0001_initial.py +++ b/passerelle/contrib/fake_family/migrations/0001_initial.py @@ -15,12 +15,28 @@ class Migration(migrations.Migration): migrations.CreateModel( name='FakeFamily', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('jsondatabase', django.contrib.postgres.fields.jsonb.JSONField(default=dict, verbose_name='Fake Database (JSON)', blank=True)), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_fakefamily_users_+', related_query_name='+', blank=True)), + ( + 'jsondatabase', + django.contrib.postgres.fields.jsonb.JSONField( + default=dict, verbose_name='Fake Database (JSON)', blank=True + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_fakefamily_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Fake Family System', diff --git a/passerelle/contrib/fake_family/migrations/0002_fakefamily_log_level.py b/passerelle/contrib/fake_family/migrations/0002_fakefamily_log_level.py index 35a9e584..b5799d32 100644 --- a/passerelle/contrib/fake_family/migrations/0002_fakefamily_log_level.py +++ b/passerelle/contrib/fake_family/migrations/0002_fakefamily_log_level.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AddField( 
model_name='fakefamily', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/contrib/fake_family/migrations/0005_auto_20200504_1402.py b/passerelle/contrib/fake_family/migrations/0005_auto_20200504_1402.py index 6590e1eb..289de79c 100644 --- a/passerelle/contrib/fake_family/migrations/0005_auto_20200504_1402.py +++ b/passerelle/contrib/fake_family/migrations/0005_auto_20200504_1402.py @@ -16,6 +16,8 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='fakefamily', name='jsondatabase', - field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, verbose_name='Fake Database (JSON)'), + field=django.contrib.postgres.fields.jsonb.JSONField( + blank=True, verbose_name='Fake Database (JSON)' + ), ), ] diff --git a/passerelle/contrib/fake_family/models.py b/passerelle/contrib/fake_family/models.py index 8b435ece..cfaed264 100644 --- a/passerelle/contrib/fake_family/models.py +++ b/passerelle/contrib/fake_family/models.py @@ -24,8 +24,7 @@ from .default_database import default_database class FakeFamily(BaseResource): - jsondatabase = JSONField(_('Fake Database (JSON)'), - blank=True) + jsondatabase = JSONField(_('Fake Database (JSON)'), blank=True) category = _('Business Process Connectors') @@ -53,8 +52,9 @@ class FakeFamily(BaseResource): if not family_ids: raise ObjectDoesNotExist('adult "%s" not found in a family' % adult_id) if len(family_ids) > 1: - raise MultipleObjectsReturned('adult "%s" is in more than one family (%s)' 
% \ - (adult_id, family_ids)) + raise MultipleObjectsReturned( + 'adult "%s" is in more than one family (%s)' % (adult_id, family_ids) + ) return family_ids[0] def get_list_of(self, kind, family_id): diff --git a/passerelle/contrib/fake_family/randomnames.py b/passerelle/contrib/fake_family/randomnames.py index e4b5b5ba..3d0287c0 100644 --- a/passerelle/contrib/fake_family/randomnames.py +++ b/passerelle/contrib/fake_family/randomnames.py @@ -2,137 +2,541 @@ import random -ADDRESS = ['Allée Alexandre Vialatte', 'Allée André Breton', 'Allée Blaise Cendrars', - 'Allée Darius Milhaud', 'Avenue Bosquet', 'Avenue Carnot', - 'Avenue Aimé Césaire', 'Avenue Daumesnil', 'Avenue des Sycomores', - 'Avenue des Terroirs de France', 'Avenue des Tilleuls', 'Avenue d\'Italie', - 'Avenue Dode de la Brunerie', 'Avenue Marceau', 'Avenue Molière', - 'Avenue Montaigne', 'Avenue Mozart', 'Boulevard Auguste Blanqui', - 'Boulevard Barbès', 'Boulevard Beaumarchais', 'Boulevard d\'Algérie', - 'Boulevard d\'Aurelle de Paladines', 'Boulevard de Reims', - 'Boulevard de Reuilly', 'Boulevard de Rochechouart', - 'Boulevard des Batignolles', 'Carrefour de l\'Odéon', 'Chaussée de l\'Étang', - 'Chemin du Parc de Charonne', 'Cité Charles Godon', 'Cité Condorcet', - 'Cité d\'Angoulême', 'Cour Saint-Émilion', 'Cour Saint-Pierre', - 'Cours des Maréchaux', 'Esplanade Nathalie Sarraute', - 'Esplanade Pierre Vidal-Naquet', 'Galerie Colbert', 'Galerie de Beaujolais', - 'Galerie de la Villette', 'Galerie des Variétés', 'Hameau Béranger', - 'Hameau d\'Alleray', 'Impasse Beaubourg', 'Impasse Bonne Nouvelle', - 'Impasse Bon Secours', 'Impasse Chausson', 'Impasse d\'Amsterdam', - 'Impasse de la Santé', 'Impasse de la Tour d\'Auvergne', 'Impasse Delepine', - 'Impasse des 2 Anges', 'Impasse des 2 Cousins', 'Impasse des 3 Soeurs', - 'Parvis du Sacré-Coeur', 'Passage Abel Leblanc', 'Passage Alexandre', - 'Passage Alexandrine', 'Passage Alombert', 'Passage des Charbonniers', - 'Passage des Crayons', 'Place André 
Malraux', 'Place Balard', 'Place Bienvenue', - 'Place Blanche', 'Place Cambronne', 'Place Carrée', 'Place de la Porte de Saint-Cloud', - 'Place de la Porte de Vanves', 'Place de la Porte Maillot', 'Place de la Porte Molitor', - 'Place de la République', 'Place de la Sorbonne', 'Port de Javel Haut', - 'Port de la Bourdonnais', 'Port de la Concorde', 'Quai de Conti', - 'Quai de Gesvres', 'Quai de Grenelle', 'Quai de Jemmapes', 'Route de Bourbon', - 'Route de Ceinture du Lac Daumesnil', 'Route de la Brasserie', 'Route de la Cascade', - 'Route de la Croix Rouge', 'Rue Calmels Prolongée', 'Rue Cambacérès', - 'Rue Cambon', 'Rue Cambronne', 'Rue Camille Blaisot', 'Rue de Bretonvilliers', - 'Rue de Brissac', 'Rue de Brosse', 'Rue Debrousse', 'Rue de Bruxelles', - 'Rue de Bucarest', 'Rue de Buci', 'Rue de Budapest', 'Rue de Buenos Ayres', - 'Rue de l\'Annonciation', 'Rue de la Nouvelle-Calédonie', 'Rue de la Paix', - 'Rue de la Parcheminerie', 'Rue de la Pépinière', 'Rue Jean Giono', - 'Rue Jean Giraudoux', 'Rue Jean Goujon', 'Rue Jean Hugues', - 'Rue Jean-Jacques Rousseau', 'Rue Jean Lantier', 'Rue René Bazin', - 'Rue René Clair', 'Rue René Goscinny', 'Rue Riblette', 'Rue Riboutte', - 'Square Alboni', 'Square Alfred Capus', 'Square Alfred Dehodencq', - 'Square Amicie Lebaudy', 'Square André Dreyer', 'Square André Lichtenberger', - 'Villa Austerlitz', 'Villa Ballu', 'Villa Baumann', 'Villa Belliard', - 'Voie Communale', 'Voie Georges Pompidou', 'Voie Mazas'] +ADDRESS = [ + 'Allée Alexandre Vialatte', + 'Allée André Breton', + 'Allée Blaise Cendrars', + 'Allée Darius Milhaud', + 'Avenue Bosquet', + 'Avenue Carnot', + 'Avenue Aimé Césaire', + 'Avenue Daumesnil', + 'Avenue des Sycomores', + 'Avenue des Terroirs de France', + 'Avenue des Tilleuls', + 'Avenue d\'Italie', + 'Avenue Dode de la Brunerie', + 'Avenue Marceau', + 'Avenue Molière', + 'Avenue Montaigne', + 'Avenue Mozart', + 'Boulevard Auguste Blanqui', + 'Boulevard Barbès', + 'Boulevard Beaumarchais', + 'Boulevard 
d\'Algérie', + 'Boulevard d\'Aurelle de Paladines', + 'Boulevard de Reims', + 'Boulevard de Reuilly', + 'Boulevard de Rochechouart', + 'Boulevard des Batignolles', + 'Carrefour de l\'Odéon', + 'Chaussée de l\'Étang', + 'Chemin du Parc de Charonne', + 'Cité Charles Godon', + 'Cité Condorcet', + 'Cité d\'Angoulême', + 'Cour Saint-Émilion', + 'Cour Saint-Pierre', + 'Cours des Maréchaux', + 'Esplanade Nathalie Sarraute', + 'Esplanade Pierre Vidal-Naquet', + 'Galerie Colbert', + 'Galerie de Beaujolais', + 'Galerie de la Villette', + 'Galerie des Variétés', + 'Hameau Béranger', + 'Hameau d\'Alleray', + 'Impasse Beaubourg', + 'Impasse Bonne Nouvelle', + 'Impasse Bon Secours', + 'Impasse Chausson', + 'Impasse d\'Amsterdam', + 'Impasse de la Santé', + 'Impasse de la Tour d\'Auvergne', + 'Impasse Delepine', + 'Impasse des 2 Anges', + 'Impasse des 2 Cousins', + 'Impasse des 3 Soeurs', + 'Parvis du Sacré-Coeur', + 'Passage Abel Leblanc', + 'Passage Alexandre', + 'Passage Alexandrine', + 'Passage Alombert', + 'Passage des Charbonniers', + 'Passage des Crayons', + 'Place André Malraux', + 'Place Balard', + 'Place Bienvenue', + 'Place Blanche', + 'Place Cambronne', + 'Place Carrée', + 'Place de la Porte de Saint-Cloud', + 'Place de la Porte de Vanves', + 'Place de la Porte Maillot', + 'Place de la Porte Molitor', + 'Place de la République', + 'Place de la Sorbonne', + 'Port de Javel Haut', + 'Port de la Bourdonnais', + 'Port de la Concorde', + 'Quai de Conti', + 'Quai de Gesvres', + 'Quai de Grenelle', + 'Quai de Jemmapes', + 'Route de Bourbon', + 'Route de Ceinture du Lac Daumesnil', + 'Route de la Brasserie', + 'Route de la Cascade', + 'Route de la Croix Rouge', + 'Rue Calmels Prolongée', + 'Rue Cambacérès', + 'Rue Cambon', + 'Rue Cambronne', + 'Rue Camille Blaisot', + 'Rue de Bretonvilliers', + 'Rue de Brissac', + 'Rue de Brosse', + 'Rue Debrousse', + 'Rue de Bruxelles', + 'Rue de Bucarest', + 'Rue de Buci', + 'Rue de Budapest', + 'Rue de Buenos Ayres', + 'Rue de 
l\'Annonciation', + 'Rue de la Nouvelle-Calédonie', + 'Rue de la Paix', + 'Rue de la Parcheminerie', + 'Rue de la Pépinière', + 'Rue Jean Giono', + 'Rue Jean Giraudoux', + 'Rue Jean Goujon', + 'Rue Jean Hugues', + 'Rue Jean-Jacques Rousseau', + 'Rue Jean Lantier', + 'Rue René Bazin', + 'Rue René Clair', + 'Rue René Goscinny', + 'Rue Riblette', + 'Rue Riboutte', + 'Square Alboni', + 'Square Alfred Capus', + 'Square Alfred Dehodencq', + 'Square Amicie Lebaudy', + 'Square André Dreyer', + 'Square André Lichtenberger', + 'Villa Austerlitz', + 'Villa Ballu', + 'Villa Baumann', + 'Villa Belliard', + 'Voie Communale', + 'Voie Georges Pompidou', + 'Voie Mazas', +] -LASTNAME = ['MARTIN', 'BERNARD', 'ROUX', 'THOMAS', 'PETIT', 'DURAND', 'MICHEL', - 'ROBERT', 'RICHARD', 'SIMON', 'MOREAU', 'DUBOIS', 'BLANC', 'LAURENT', 'GIRARD', - 'BERTRAND', 'GARNIER', 'DAVID', 'MOREL', 'GUERIN', 'FOURNIER', 'ROY', - 'ROUSSEAU', 'ANDRE', 'GAUTIER', 'BONNET', 'LAMBERT', 'HENRY', 'FAURE', - 'MERCIER', 'VINCENT', 'CHEVALIER', 'LEROY', 'MARCHAND', 'PERRIN', - 'MORIN', 'MASSON', 'GIRAUD', 'DUPONT', 'ROBIN', 'NICOLAS', 'BRUN', - 'MATHIEU', 'CLEMENT', 'LEFEBVRE', 'FABRE', 'BARBIER', 'FRANCOIS', - 'ROUSSEL', 'ARNAUD', 'GERARD', 'AUBERT', 'DUVAL', 'LEGRAND', - 'BLANCHARD', 'BRUNET', 'LEFEVRE', 'DENIS', 'BRETON', 'PIERRE', 'ROCHE', - 'PARIS', 'BOYER', 'COLIN', 'FONTAINE', 'JEAN', 'BOURGEOIS', 'GAILLARD', - 'NOEL', 'DUMAS', 'PICARD', 'BRIAND', 'LUCAS', 'ROLLAND', 'JOLY'] +LASTNAME = [ + 'MARTIN', + 'BERNARD', + 'ROUX', + 'THOMAS', + 'PETIT', + 'DURAND', + 'MICHEL', + 'ROBERT', + 'RICHARD', + 'SIMON', + 'MOREAU', + 'DUBOIS', + 'BLANC', + 'LAURENT', + 'GIRARD', + 'BERTRAND', + 'GARNIER', + 'DAVID', + 'MOREL', + 'GUERIN', + 'FOURNIER', + 'ROY', + 'ROUSSEAU', + 'ANDRE', + 'GAUTIER', + 'BONNET', + 'LAMBERT', + 'HENRY', + 'FAURE', + 'MERCIER', + 'VINCENT', + 'CHEVALIER', + 'LEROY', + 'MARCHAND', + 'PERRIN', + 'MORIN', + 'MASSON', + 'GIRAUD', + 'DUPONT', + 'ROBIN', + 'NICOLAS', + 'BRUN', + 'MATHIEU', + 
'CLEMENT', + 'LEFEBVRE', + 'FABRE', + 'BARBIER', + 'FRANCOIS', + 'ROUSSEL', + 'ARNAUD', + 'GERARD', + 'AUBERT', + 'DUVAL', + 'LEGRAND', + 'BLANCHARD', + 'BRUNET', + 'LEFEVRE', + 'DENIS', + 'BRETON', + 'PIERRE', + 'ROCHE', + 'PARIS', + 'BOYER', + 'COLIN', + 'FONTAINE', + 'JEAN', + 'BOURGEOIS', + 'GAILLARD', + 'NOEL', + 'DUMAS', + 'PICARD', + 'BRIAND', + 'LUCAS', + 'ROLLAND', + 'JOLY', +] FIRSTNAME = { - 'child': { - 'M': ['Adam', 'Alex', 'Alexandre', 'Alexis', 'Anthony', 'Antoine', - 'Benjamin', 'Cédric', 'Charles', 'Christopher', 'David', - 'Dylan', 'Édouard', 'Elliot', 'Émile', 'Étienne', 'Félix', - 'Gabriel', 'Guillaume', 'Hugo', 'Isaac', 'Jacob', 'Jérémy', - 'Jonathan', 'Julien', 'Justin', 'Léo', 'Logan', 'Loïc', - 'Louis', 'Lucas', 'Ludovic', 'Malik', 'Mathieu', 'Mathis', - 'Maxime', 'Michaël', 'Nathan', 'Nicolas', 'Noah', 'Olivier', - 'Philippe', 'Raphaël', 'Samuel', 'Simon', 'Thomas', 'Tommy', - 'Tristan', 'Victor', 'Vincent'], - 'F': ['Alexia', 'Alice', 'Alicia', 'Amélie', 'Anaïs', 'Annabelle', - 'Arianne', 'Audrey', 'Aurélie', 'Camille', 'Catherine', - 'Charlotte', 'Chloé', 'Clara', 'Coralie', 'Daphnée', - 'Delphine', 'Elizabeth', 'Élodie', 'Émilie', 'Emma', 'Emy', - 'Ève', 'Florence', 'Gabrielle', 'Jade', 'Juliette', 'Justine', - 'Laurence', 'Laurie', 'Léa', 'Léanne', 'Maélie', 'Maéva', - 'Maika', 'Marianne', 'Marilou', 'Maude', 'Maya', 'Mégan', - 'Mélodie', 'Mia', 'Noémie', 'Océane', 'Olivia', 'Rosalie', - 'Rose', 'Sarah', 'Sofia', 'Victoria'] - }, - 'adult': { - 'M': ['Jean', 'Philippe', 'Michel', 'Alain', 'Patrick', 'Nicolas', - 'Christophe', 'Pierre', 'Christian', 'Éric', 'Frédéric', - 'Laurent', 'Stéphane', 'David', 'Pascal', 'Daniel', - 'Sébastien', 'Julien', 'Thierry', 'Olivier', 'Bernard', - 'Thomas', 'Alexandre', 'Gérard', 'Didier', 'Dominique', - 'Vincent', 'François', 'Bruno', 'Guillaume', 'Jérôme', - 'Jacques', 'Marc', 'Maxime', 'Romain', 'Claude', 'Antoine', - 'Franck', 'Jean-Pierre', 'Anthony', 'Kévin', 'Gilles', - 'Cédric', 'Serge', 'André', 
'Mathieu', 'Benjamin', 'Patrice', - 'Fabrice', 'Joël', 'Jérémy', 'Clément', 'Arnaud', 'Denis', - 'Paul', 'Lucas', 'Hervé', 'Jean-Claude', 'Sylvain', 'Yves', - 'Ludovic', 'Guy', 'Florian', 'Damien', 'Alexis', 'Mickaël', - 'Quentin', 'Emmanuel', 'Louis', 'Benoît', 'Jean-Luc', 'Fabien', - 'Francis', 'Hugo', 'Jonathan', 'Loïc', 'Xavier', 'Théo', - 'Adrien', 'Raphaël', 'Jean-Francois', 'Grégory', 'Robert', - 'Michaël', 'Valentin', 'Cyril', 'Jean-Marc', 'René', 'Lionel', - 'Yannick', 'Enzo', 'Yann', 'Jean-Michel', 'Baptiste', - 'Matthieu', 'Rémi', 'Georges', 'Aurélien', 'Nathan', - 'Jean-Paul'], - 'F': ['Marie', 'Nathalie', 'Isabelle', 'Sylvie', 'Catherine', - 'Martine', 'Christine', 'Françoise', 'Valerie', 'Sandrine', - 'Stephanie', 'Veronique', 'Sophie', 'Celine', 'Chantal', - 'Patricia', 'Anne', 'Brigitte', 'Julie', 'Monique', 'Aurelie', - 'Nicole', 'Laurence', 'Annie', 'Émilie', 'Dominique', - 'Virginie', 'Corinne', 'Elodie', 'Christelle', 'Camille', - 'Caroline', 'Lea', 'Sarah', 'Florence', 'Laetitia', 'Audrey', - 'Helene', 'Laura', 'Manon', 'Michele', 'Cecile', 'Christiane', - 'Beatrice', 'Claire', 'Nadine', 'Delphine', 'Pauline', - 'Karine', 'Melanie', 'Marion', 'Chloe', 'Jacqueline', - 'Elisabeth', 'Evelyne', 'Marine', 'Claudine', 'Anais', 'Lucie', - 'Danielle', 'Carole', 'Fabienne', 'Mathilde', 'Sandra', - 'Pascale', 'Annick', 'Charlotte', 'Emma', 'Severine', - 'Sabrina', 'Amandine', 'Myriam', 'Jocelyne', 'Alexandra', - 'Angelique', 'Josiane', 'Joelle', 'Agnes', 'Mireille', - 'Vanessa', 'Justine', 'Sonia', 'Bernadette', 'Emmanuelle', - 'Oceane', 'Amelie', 'Clara', 'Maryse', 'Anne-marie', 'Fanny', - 'Magali', 'Marie-christine', 'Morgane', 'Ines', 'Nadia', - 'Muriel', 'Jessica', 'Laure', 'Genevieve', 'Estelle'] - } - } + 'child': { + 'M': [ + 'Adam', + 'Alex', + 'Alexandre', + 'Alexis', + 'Anthony', + 'Antoine', + 'Benjamin', + 'Cédric', + 'Charles', + 'Christopher', + 'David', + 'Dylan', + 'Édouard', + 'Elliot', + 'Émile', + 'Étienne', + 'Félix', + 'Gabriel', + 
'Guillaume', + 'Hugo', + 'Isaac', + 'Jacob', + 'Jérémy', + 'Jonathan', + 'Julien', + 'Justin', + 'Léo', + 'Logan', + 'Loïc', + 'Louis', + 'Lucas', + 'Ludovic', + 'Malik', + 'Mathieu', + 'Mathis', + 'Maxime', + 'Michaël', + 'Nathan', + 'Nicolas', + 'Noah', + 'Olivier', + 'Philippe', + 'Raphaël', + 'Samuel', + 'Simon', + 'Thomas', + 'Tommy', + 'Tristan', + 'Victor', + 'Vincent', + ], + 'F': [ + 'Alexia', + 'Alice', + 'Alicia', + 'Amélie', + 'Anaïs', + 'Annabelle', + 'Arianne', + 'Audrey', + 'Aurélie', + 'Camille', + 'Catherine', + 'Charlotte', + 'Chloé', + 'Clara', + 'Coralie', + 'Daphnée', + 'Delphine', + 'Elizabeth', + 'Élodie', + 'Émilie', + 'Emma', + 'Emy', + 'Ève', + 'Florence', + 'Gabrielle', + 'Jade', + 'Juliette', + 'Justine', + 'Laurence', + 'Laurie', + 'Léa', + 'Léanne', + 'Maélie', + 'Maéva', + 'Maika', + 'Marianne', + 'Marilou', + 'Maude', + 'Maya', + 'Mégan', + 'Mélodie', + 'Mia', + 'Noémie', + 'Océane', + 'Olivia', + 'Rosalie', + 'Rose', + 'Sarah', + 'Sofia', + 'Victoria', + ], + }, + 'adult': { + 'M': [ + 'Jean', + 'Philippe', + 'Michel', + 'Alain', + 'Patrick', + 'Nicolas', + 'Christophe', + 'Pierre', + 'Christian', + 'Éric', + 'Frédéric', + 'Laurent', + 'Stéphane', + 'David', + 'Pascal', + 'Daniel', + 'Sébastien', + 'Julien', + 'Thierry', + 'Olivier', + 'Bernard', + 'Thomas', + 'Alexandre', + 'Gérard', + 'Didier', + 'Dominique', + 'Vincent', + 'François', + 'Bruno', + 'Guillaume', + 'Jérôme', + 'Jacques', + 'Marc', + 'Maxime', + 'Romain', + 'Claude', + 'Antoine', + 'Franck', + 'Jean-Pierre', + 'Anthony', + 'Kévin', + 'Gilles', + 'Cédric', + 'Serge', + 'André', + 'Mathieu', + 'Benjamin', + 'Patrice', + 'Fabrice', + 'Joël', + 'Jérémy', + 'Clément', + 'Arnaud', + 'Denis', + 'Paul', + 'Lucas', + 'Hervé', + 'Jean-Claude', + 'Sylvain', + 'Yves', + 'Ludovic', + 'Guy', + 'Florian', + 'Damien', + 'Alexis', + 'Mickaël', + 'Quentin', + 'Emmanuel', + 'Louis', + 'Benoît', + 'Jean-Luc', + 'Fabien', + 'Francis', + 'Hugo', + 'Jonathan', + 'Loïc', + 'Xavier', + 
'Théo', + 'Adrien', + 'Raphaël', + 'Jean-Francois', + 'Grégory', + 'Robert', + 'Michaël', + 'Valentin', + 'Cyril', + 'Jean-Marc', + 'René', + 'Lionel', + 'Yannick', + 'Enzo', + 'Yann', + 'Jean-Michel', + 'Baptiste', + 'Matthieu', + 'Rémi', + 'Georges', + 'Aurélien', + 'Nathan', + 'Jean-Paul', + ], + 'F': [ + 'Marie', + 'Nathalie', + 'Isabelle', + 'Sylvie', + 'Catherine', + 'Martine', + 'Christine', + 'Françoise', + 'Valerie', + 'Sandrine', + 'Stephanie', + 'Veronique', + 'Sophie', + 'Celine', + 'Chantal', + 'Patricia', + 'Anne', + 'Brigitte', + 'Julie', + 'Monique', + 'Aurelie', + 'Nicole', + 'Laurence', + 'Annie', + 'Émilie', + 'Dominique', + 'Virginie', + 'Corinne', + 'Elodie', + 'Christelle', + 'Camille', + 'Caroline', + 'Lea', + 'Sarah', + 'Florence', + 'Laetitia', + 'Audrey', + 'Helene', + 'Laura', + 'Manon', + 'Michele', + 'Cecile', + 'Christiane', + 'Beatrice', + 'Claire', + 'Nadine', + 'Delphine', + 'Pauline', + 'Karine', + 'Melanie', + 'Marion', + 'Chloe', + 'Jacqueline', + 'Elisabeth', + 'Evelyne', + 'Marine', + 'Claudine', + 'Anais', + 'Lucie', + 'Danielle', + 'Carole', + 'Fabienne', + 'Mathilde', + 'Sandra', + 'Pascale', + 'Annick', + 'Charlotte', + 'Emma', + 'Severine', + 'Sabrina', + 'Amandine', + 'Myriam', + 'Jocelyne', + 'Alexandra', + 'Angelique', + 'Josiane', + 'Joelle', + 'Agnes', + 'Mireille', + 'Vanessa', + 'Justine', + 'Sonia', + 'Bernadette', + 'Emmanuelle', + 'Oceane', + 'Amelie', + 'Clara', + 'Maryse', + 'Anne-marie', + 'Fanny', + 'Magali', + 'Marie-christine', + 'Morgane', + 'Ines', + 'Nadia', + 'Muriel', + 'Jessica', + 'Laure', + 'Genevieve', + 'Estelle', + ], + }, +} def person(kind='child'): sex = random.choice(list(FIRSTNAME[kind].keys())) first_name = random.choice(FIRSTNAME[kind][sex]) last_name = random.choice(LASTNAME) - return { - 'sex': sex, - 'first_name': first_name, - 'last_name': last_name - } + return {'sex': sex, 'first_name': first_name, 'last_name': last_name} + def address(): return { - 'address': '%d %s' % 
(random.randint(1,100), random.choice(ADDRESS)), + 'address': '%d %s' % (random.randint(1, 100), random.choice(ADDRESS)), 'city': 'ConnectVille', 'post_code': '75014', } diff --git a/passerelle/contrib/fake_family/urls.py b/passerelle/contrib/fake_family/urls.py index 2a521370..9087e2d1 100644 --- a/passerelle/contrib/fake_family/urls.py +++ b/passerelle/contrib/fake_family/urls.py @@ -19,16 +19,10 @@ from django.conf.urls import include, url from .views import * urlpatterns = [ - url(r'^(?P[\w,-]+)/$', FakeFamilyDetailView.as_view(), - name='fake-family-view'), - url(r'^(?P[\w,-]+)/dump/$', DumpView.as_view(), - name='fake-family-dump'), - url(r'^(?P[\w,-]+)/family/link/$', LinkView.as_view(), - name='fake-family-link'), - url(r'^(?P[\w,-]+)/family/unlink/$', UnlinkView.as_view(), - name='fake-family-unlink'), - url(r'^(?P[\w,-]+)/family/$', FamilyView.as_view(), - name='fake-family-info'), - url(r'^(?P[\w,-]+)/family/(?P[\w,-]+)/$', FamilyKeyView.as_view(), - name='fake-family-key'), + url(r'^(?P[\w,-]+)/$', FakeFamilyDetailView.as_view(), name='fake-family-view'), + url(r'^(?P[\w,-]+)/dump/$', DumpView.as_view(), name='fake-family-dump'), + url(r'^(?P[\w,-]+)/family/link/$', LinkView.as_view(), name='fake-family-link'), + url(r'^(?P[\w,-]+)/family/unlink/$', UnlinkView.as_view(), name='fake-family-unlink'), + url(r'^(?P[\w,-]+)/family/$', FamilyView.as_view(), name='fake-family-info'), + url(r'^(?P[\w,-]+)/family/(?P[\w,-]+)/$', FamilyKeyView.as_view(), name='fake-family-key'), ] diff --git a/passerelle/contrib/fake_family/views.py b/passerelle/contrib/fake_family/views.py index 4fc699c3..8880cc09 100644 --- a/passerelle/contrib/fake_family/views.py +++ b/passerelle/contrib/fake_family/views.py @@ -33,6 +33,7 @@ class FakeFamilyDetailView(GenericDetailView): class DumpView(GenericDetailView): model = FakeFamily + def get(self, request, *args, **kwargs): data = self.get_object().jsondatabase return utils.response_for_json(request, data) @@ -94,7 +95,7 @@ class 
FamilyDetailView(DetailView): assert nameid is not None, 'missing NameID in query string' family_id = self.resource.get_familyid_by_nameid(nameid) if family_id is None: - return None # nameid not linked to a family + return None # nameid not linked to a family self.family_id = family_id return self.get_details(*args, **kwargs) diff --git a/passerelle/contrib/gdema/migrations/0001_initial.py b/passerelle/contrib/gdema/migrations/0001_initial.py index 6cf903cb..312ea38c 100644 --- a/passerelle/contrib/gdema/migrations/0001_initial.py +++ b/passerelle/contrib/gdema/migrations/0001_initial.py @@ -14,15 +14,43 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Gdema', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('service_url', models.URLField(help_text='GDEMA API base URL', max_length=256, verbose_name='Service URL')), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'service_url', + models.URLField( + help_text='GDEMA API base URL', max_length=256, verbose_name='Service URL' + ), + ), ('username', models.CharField(max_length=128, verbose_name='Username')), ('password', models.CharField(max_length=128, 
verbose_name='Password')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_gdema_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_gdema_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'GDEMA', diff --git a/passerelle/contrib/gdema/models.py b/passerelle/contrib/gdema/models.py index 27a9a23c..0411052c 100644 --- a/passerelle/contrib/gdema/models.py +++ b/passerelle/contrib/gdema/models.py @@ -30,16 +30,21 @@ from passerelle.utils.api import endpoint from passerelle.utils.jsonresponse import APIError # Only for documentation -REFERENTIALS = ('service', 'typology', 'inputchannel', 'structure', 'quartierelu', - 'secteurterritoriale', 'civility', 'title') +REFERENTIALS = ( + 'service', + 'typology', + 'inputchannel', + 'structure', + 'quartierelu', + 'secteurterritoriale', + 'civility', + 'title', +) # GDEMA date format is /Date(1510786800000+0100)/ (tz is optionnal) gdema_datetime_re = re.compile( - r'/Date\(' - r'(?P\d+)' - r'(?PZ|[+-]\d{2}(?::?\d{2})?)?' - r'\)/$' + r'/Date\(' r'(?P\d+)' r'(?PZ|[+-]\d{2}(?::?\d{2})?)?' r'\)/$' ) @@ -47,7 +52,7 @@ def parse_gdema_datetime(value): match = gdema_datetime_re.match(value) if match: kw = match.groupdict() - timestamp = int(kw['timestamp_ms'])/1000.0 + timestamp = int(kw['timestamp_ms']) / 1000.0 tzinfo = kw.get('tzinfo') if tzinfo == 'Z': tzinfo = utc @@ -91,26 +96,25 @@ def gdema_datetime(value): dt = datetime.datetime(dt.year, dt.month, dt.day) if is_naive(dt): dt = make_aware(dt) - timestamp_ms = (dt-datetime.datetime(1970, 1, 1, tzinfo=utc)).total_seconds() * 1000 + timestamp_ms = (dt - datetime.datetime(1970, 1, 1, tzinfo=utc)).total_seconds() * 1000 tzinfo = dt.strftime('%z') return '/Date(%d%s)/' % (timestamp_ms, tzinfo) def to_gdema(input_dict): - ''' + """ nameDate: ... 
-> nameDate: /Date(...)/ name: {publik file dict} -> name: {gdema file dict} Name_Key: value -> Name: {Key: value, ...} Name_: value -> Name: [value, ...] - ''' + """ gdema_dict = {} for key, value in input_dict.items(): # nameDate: ... -> nameDate: /Date(...)/ if key.endswith('Date'): value = gdema_datetime(value) # name: {publik file dict} -> name: {gdema file dict} - if isinstance(value, dict) and ('filename' in value and - 'content' in value): + if isinstance(value, dict) and ('filename' in value and 'content' in value): value = { 'Name': value['filename'], 'Base64Stream': value['content'], @@ -128,9 +132,9 @@ def to_gdema(input_dict): if key not in gdema_dict: gdema_dict[key] = [] if len(gdema_dict[key]) >= index: - gdema_dict[key][index-1] = value + gdema_dict[key][index - 1] = value else: - holes = [None for i in range(index-len(gdema_dict[key])-1)] + holes = [None for i in range(index - len(gdema_dict[key]) - 1)] gdema_dict[key].extend(holes) gdema_dict[key].append(value) else: @@ -145,9 +149,9 @@ def to_gdema(input_dict): class Gdema(BaseResource): - service_url = models.URLField(max_length=256, blank=False, - verbose_name=_('Service URL'), - help_text=_('GDEMA API base URL')) + service_url = models.URLField( + max_length=256, blank=False, verbose_name=_('Service URL'), help_text=_('GDEMA API base URL') + ) username = models.CharField(max_length=128, blank=True, verbose_name=_('Username')) password = models.CharField(max_length=128, blank=True, verbose_name=_('Password')) @@ -192,12 +196,14 @@ class Gdema(BaseResource): for service in services: if not service_id or service['Id'] == service_id: for typology in service['Typology']: - typologies.append({ - 'id': '%s' % typology['Value'], - 'text': typology['Text'], - 'service_id': '%s' % service['Id'], - 'service_text': service['Label'], - }) + typologies.append( + { + 'id': '%s' % typology['Value'], + 'text': typology['Text'], + 'service_id': '%s' % service['Id'], + 'service_text': service['Label'], + } + ) 
if service_id: break return typologies @@ -205,20 +211,24 @@ class Gdema(BaseResource): def check_status(self): self.get_services() - @endpoint(name='referentiel', pattern='^(?P<name>\w+)/*$', - description=_('Get reference items'), - example_pattern='{name}/', - parameters={ - 'name': { - 'description': _('Referential name: (%s)') % ' | '.join(REFERENTIALS), - 'example_value': 'inputchannel', - }, - 'service_id': { - 'description': _('Filter by service id (for typology referential)'), - 'example_value': '21714', - }, - }, - methods=['get'], perm='can_access') + @endpoint( + name='referentiel', + pattern='^(?P<name>\w+)/*$', + description=_('Get reference items'), + example_pattern='{name}/', + parameters={ + 'name': { + 'description': _('Referential name: (%s)') % ' | '.join(REFERENTIALS), + 'example_value': 'inputchannel', + }, + 'service_id': { + 'description': _('Filter by service id (for typology referential)'), + 'example_value': '21714', + }, + }, + methods=['get'], + perm='can_access', + ) def referentiel(self, request, name, service_id=None): if name == 'service': return {'data': self.get_services()} @@ -227,15 +237,20 @@ class Gdema(BaseResource): data = [] items = self.request('referentiel/%s' % name) for item in items: - data.append({ - 'id': '%s' % item['Value'], - 'text': item['Text'], - }) + data.append( + { + 'id': '%s' % item['Value'], + 'text': item['Text'], + } + ) return {'data': data} - @endpoint(name='create-request', - description=_('Create a new request (POST)'), - methods=['post'], perm='can_access') + @endpoint( + name='create-request', + description=_('Create a new request (POST)'), + methods=['post'], + perm='can_access', + ) def create_request(self, request): try: payload = json_loads(request.body) @@ -247,30 +262,38 @@ class Gdema(BaseResource): data = self.request('request/create', payload) return {'data': normalize(data)} - @endpoint(name='get-request', pattern='^(?P<request_id>\d+)/*$', - description=_('Get request details'), - 
example_pattern='{request_id}/', - parameters={ - 'request_id': { - 'description': _('Request Id'), - 'example_value': '10', - }, - }, - methods=['get'], perm='can_access') + @endpoint( + name='get-request', + pattern='^(?P<request_id>\d+)/*$', + description=_('Get request details'), + example_pattern='{request_id}/', + parameters={ + 'request_id': { + 'description': _('Request Id'), + 'example_value': '10', + }, + }, + methods=['get'], + perm='can_access', + ) def get_request(self, request, request_id): data = self.request('request/%s' % request_id) return {'data': normalize(data)} - @endpoint(name='get-request-state', pattern='^(?P<request_id>\d+)/*$', - description=_('Get request status'), - example_pattern='{request_id}/', - parameters={ - 'request_id': { - 'description': _('Request Id'), - 'example_value': '10', - }, - }, - methods=['get'], perm='can_access') + @endpoint( + name='get-request-state', + pattern='^(?P<request_id>\d+)/*$', + description=_('Get request status'), + example_pattern='{request_id}/', + parameters={ + 'request_id': { + 'description': _('Request Id'), + 'example_value': '10', + }, + }, + methods=['get'], + perm='can_access', + ) def get_request_state(self, request, request_id): data = self.request('request/%s/state' % request_id) return {'data': normalize(data)} diff --git a/passerelle/contrib/grandlyon_streetsections/migrations/0001_initial.py b/passerelle/contrib/grandlyon_streetsections/migrations/0001_initial.py index 2dbdc1ff..6ddea070 100644 --- a/passerelle/contrib/grandlyon_streetsections/migrations/0001_initial.py +++ b/passerelle/contrib/grandlyon_streetsections/migrations/0001_initial.py @@ -14,12 +14,38 @@ class Migration(migrations.Migration): migrations.CreateModel( name='GrandLyonStreetSections', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', 
models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_grandlyonstreetsections_users_+', related_query_name='+', blank=True)), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_grandlyonstreetsections_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Sections of Grand Lyon Streets', @@ -29,7 +55,10 @@ class Migration(migrations.Migration): name='StreetSection', options={'ordering': ['normalized_name', 'nomcommune', 'bornemindroite', 'bornemingauche']}, fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('bornemindroite', models.PositiveIntegerField(null=True)), ('bornemingauche', models.PositiveIntegerField(null=True)), ('bornemaxdroite', models.PositiveIntegerField(null=True)), diff --git a/passerelle/contrib/grandlyon_streetsections/models.py b/passerelle/contrib/grandlyon_streetsections/models.py index 92353852..e9741967 100644 --- a/passerelle/contrib/grandlyon_streetsections/models.py +++ b/passerelle/contrib/grandlyon_streetsections/models.py @@ -25,9 +25,7 @@ from passerelle.base.models import BaseResource 
from passerelle.compat import json_loads from passerelle.utils.api import endpoint -COMMUNE_EXTRA_MAPPING = { - 'Vaulx-en-Velin': 'VAULX' -} +COMMUNE_EXTRA_MAPPING = {'Vaulx-en-Velin': 'VAULX'} DEFAULT_MIN = 0 DEFAULT_MAX = 99999 @@ -49,29 +47,27 @@ class GrandLyonStreetSections(BaseResource): class Meta: verbose_name = _('Sections of Grand Lyon Streets') - @endpoint(perm='can_access', - description=_('Get details on a section'), - parameters={ - 'streetname': { - 'description': _('Street name'), - 'example_value': 'Boulevard du Raquin', - }, - 'streetnumber': { - 'description': _('Street number'), - 'example_value': '12', - }, - 'commune': { - 'description': _('Collectivity'), - 'example_value': 'Chassieu', - }, - 'insee': { - 'description': _('INSEE Code'), - 'example_value': '69271' - } - }) + @endpoint( + perm='can_access', + description=_('Get details on a section'), + parameters={ + 'streetname': { + 'description': _('Street name'), + 'example_value': 'Boulevard du Raquin', + }, + 'streetnumber': { + 'description': _('Street number'), + 'example_value': '12', + }, + 'commune': { + 'description': _('Collectivity'), + 'example_value': 'Chassieu', + }, + 'insee': {'description': _('INSEE Code'), 'example_value': '69271'}, + }, + ) def section_info(self, request, streetname, streetnumber, commune=None, insee=None): - sections = StreetSection.objects.filter( - normalized_name=normalize_street(streetname)) + sections = StreetSection.objects.filter(normalized_name=normalize_street(streetname)) if commune: sections = sections.filter(nomcommune__startswith=normalize_commune(commune)) if insee: @@ -105,10 +101,12 @@ class GrandLyonStreetSections(BaseResource): 'nomcommune': nomcommune, 'nomcommuneorigine': section.nomcommune, # with district 'codeinsee': section.codeinsee, - } + }, } - if DEFAULT_MIN in (section.bornemindroite, section.bornemingauche) or \ - DEFAULT_MAX in (section.bornemaxdroite, section.bornemaxgauche): + if DEFAULT_MIN in (section.bornemindroite, 
section.bornemingauche) or DEFAULT_MAX in ( + section.bornemaxdroite, + section.bornemaxgauche, + ): default_match = match continue @@ -123,18 +121,17 @@ class GrandLyonStreetSections(BaseResource): super(GrandLyonStreetSections, self).daily() update_start = timezone.now() sections = self.requests.get( - 'https://download.data.grandlyon.com/ws/grandlyon/adr_voie_lieu.adraxevoie/all.json?maxfeatures=1000000').content + 'https://download.data.grandlyon.com/ws/grandlyon/adr_voie_lieu.adraxevoie/all.json?maxfeatures=1000000' + ).content for value in json_loads(sections).get('values'): if not value.get('codefuv') or not value.get('codetroncon'): continue section, created = StreetSection.objects.get_or_create( - codefuv=value.get('codefuv'), - codetroncon=value.get('codetroncon')) + codefuv=value.get('codefuv'), codetroncon=value.get('codetroncon') + ) for attribute in ('nom', 'nomcommune', 'domanialite', 'codeinsee'): setattr(section, attribute, value.get(attribute) or '') - for attribute in ('bornemindroite', 'bornemingauche', - 'bornemaxdroite', 'bornemaxgauche', - 'gid'): + for attribute in ('bornemindroite', 'bornemingauche', 'bornemaxdroite', 'bornemaxgauche', 'gid'): if value.get(attribute) in (None, 'None'): # data.grandlyon returned 'None' as a string at a time if 'min' in attribute: diff --git a/passerelle/contrib/greco/formdata.py b/passerelle/contrib/greco/formdata.py index f189344c..cc68f8ab 100644 --- a/passerelle/contrib/greco/formdata.py +++ b/passerelle/contrib/greco/formdata.py @@ -21,11 +21,13 @@ def is_required(value): raise ValueError('is required') return value + def to_datetime(value): if not value: return return datetime.strptime(value[:19], '%Y-%m-%dT%H:%M:%S') + def default_to_now(value): if not value: return datetime.now() @@ -41,7 +43,6 @@ CREATION_SCHEMA = ( ('danger', bool), 'mediareponse', 'priorite', - 'beneficiaire_civilite', 'beneficiaire_nom', 'beneficiaire_prenom', @@ -58,7 +59,6 @@ CREATION_SCHEMA = ( 'beneficiaire_commune', 
'beneficiaire_organisation', 'beneficiaire_typetiers', - 'localisation_numerovoie', 'localisation_voie', 'localisation_codefuvvoie', @@ -68,7 +68,6 @@ CREATION_SCHEMA = ( 'localisation_voiesecante', 'localisation_codefuvvoiesecante', 'localisation_coderivolivoiesecante', - 'transmetteur_civilite', 'transmetteur_nom', 'transmetteur_prenom', @@ -79,12 +78,13 @@ CREATION_SCHEMA = ( 'transmetteur_service', ) + def list_schema_fields(schema): for fieldname in schema: yield fieldname[0] if isinstance(fieldname, tuple) else fieldname -class FormData(object): +class FormData(object): def __init__(self, formdata, schema): if not isinstance(formdata, dict): raise ValueError('formdata must be a dict') @@ -117,9 +117,8 @@ class FormData(object): attachments = { key: value for key, value in values.items() - if isinstance(value, dict) and ('filename' in value and - 'content_type' in value and - 'content' in value) + if isinstance(value, dict) + and ('filename' in value and 'content_type' in value and 'content' in value) } for key in sorted(attachments.keys()): self.attachments.append(attachments[key]) diff --git a/passerelle/contrib/greco/migrations/0001_initial.py b/passerelle/contrib/greco/migrations/0001_initial.py index f1a96c4a..c0ff4b3c 100644 --- a/passerelle/contrib/greco/migrations/0001_initial.py +++ b/passerelle/contrib/greco/migrations/0001_initial.py @@ -14,17 +14,43 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Greco', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', 
b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('application', models.CharField(max_length=200, verbose_name='Application identifier')), ('token_url', models.URLField(max_length=256, verbose_name='Token URL')), ('token_authorization', models.CharField(max_length=128, verbose_name='Token Authorization')), ('wsdl_url', models.CharField(max_length=256, verbose_name='WSDL URL')), - ('verify_cert', models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_greco_users_+', related_query_name='+', blank=True)), + ( + 'verify_cert', + models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity'), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_greco_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'GRECO Webservices', diff --git a/passerelle/contrib/greco/models.py b/passerelle/contrib/greco/models.py index 9980ef29..4b68c07a 100644 --- a/passerelle/contrib/greco/models.py +++ b/passerelle/contrib/greco/models.py @@ -73,9 +73,8 @@ class Greco(BaseResource): application = models.CharField(_('Application identifier'), max_length=200) token_url = models.URLField(_('Token URL'), max_length=256) token_authorization = models.CharField(_('Token Authorization'), max_length=128) - wsdl_url = models.CharField(_('WSDL URL'), max_length=256) # not URLField, it can be file:// - verify_cert = models.BooleanField(default=True, - verbose_name=_('Check HTTPS Certificate validity')) + wsdl_url = models.CharField(_('WSDL URL'), max_length=256) # 
not URLField, it can be file:// + verify_cert = models.BooleanField(default=True, verbose_name=_('Check HTTPS Certificate validity')) category = _('Business Process Connectors') @@ -89,9 +88,13 @@ class Greco(BaseResource): if token: return token headers = {'Authorization': 'Basic %s' % self.token_authorization} - resp = self.requests.post(self.token_url, headers=headers, - data={'grant_type': 'client_credentials'}, - verify=self.verify_cert, timeout=60).json() + resp = self.requests.post( + self.token_url, + headers=headers, + data={'grant_type': 'client_credentials'}, + verify=self.verify_cert, + timeout=60, + ).json() token = '%s %s' % (resp.get('token_type'), resp.get('access_token')) timeout = int(resp.get('expires_in')) cache.set(cache_key, token, timeout) @@ -109,8 +112,7 @@ class Greco(BaseResource): request.message = request.message.replace(b"contentType", b"xm:contentType") if self.attachments: # SOAP Attachement format - message = MIMEMultipart('related', type="text/xml", - start="") + message = MIMEMultipart('related', type="text/xml", start="") xml = MIMEText(None, _subtype='xml', _charset='utf-8') xml.add_header('Content-ID', '') # do not base64-encode the soap message @@ -123,8 +125,9 @@ class Greco(BaseResource): for num, attachment in enumerate(self.attachments): filename = attachment.get('filename') or 'file%s.bin' % num soap_headers.append('%s' % (num, filename, num)) - xml_payload = xml_payload.replace('', - '%s' % ''.join(soap_headers)) + xml_payload = xml_payload.replace( + '', '%s' % ''.join(soap_headers) + ) xml.set_payload(xml_payload) message.attach(xml) @@ -164,40 +167,49 @@ class Greco(BaseResource): # Manually hack message to put \r\n so that the message is # correctly read by Apache Axis strict parser. 
boundary = message.get_boundary() - request.message = message.as_string(unixfrom=False - ).replace(boundary + '\n', boundary + '\r\n' - ).replace('\n--' + boundary, '\r\n--' + boundary).encode('utf-8') + request.message = ( + message.as_string(unixfrom=False) + .replace(boundary + '\n', boundary + '\r\n') + .replace('\n--' + boundary, '\r\n--' + boundary) + .encode('utf-8') + ) for attachment in attachments: # substitute binary parts if attachment.get('fake_bytes'): request.message = request.message.replace( - attachment['fake_bytes'].encode('utf-8'), - attachment['real_bytes']) + attachment['fake_bytes'].encode('utf-8'), attachment['real_bytes'] + ) request.headers.update(dict(message._headers)) request.headers['Authorization'] = self.instance.get_token() - resp = self.instance.requests.post(request.url, data=request.message, - headers=request.headers, - verify=self.instance.verify_cert, - timeout=60) + resp = self.instance.requests.post( + request.url, + data=request.message, + headers=request.headers, + verify=self.instance.verify_cert, + timeout=60, + ) if resp.status_code == 401: # ask for a new token, and retry request.headers['Authorization'] = self.instance.get_token(renew=True) - resp = self.instance.requests.post(request.url, data=request.message, - headers=request.headers, - verify=self.instance.verify_cert, - timeout=60) + resp = self.instance.requests.post( + request.url, + data=request.message, + headers=request.headers, + verify=self.instance.verify_cert, + timeout=60, + ) if resp.status_code >= 400 and resp.status_code != 500: - raise APIError('HTTP Transport Error %s' % resp.status_code, - err_code='transport-error-%s' % resp.status_code) + raise APIError( + 'HTTP Transport Error %s' % resp.status_code, + err_code='transport-error-%s' % resp.status_code, + ) elif resp.status_code == 500 and b'Fault' not in resp.content: - raise APIError('Error 500, not a SOAP Fault', - err_code='transport-error-500') + raise APIError('Error 500, not a SOAP Fault', 
err_code='transport-error-500') return Reply(resp.status_code, resp.headers, resp.content) return Client(url=self.wsdl_url, transport=Transport(self, attachments)) - def check_status(self): if self.get_client().service.communicationTest('ping') is None: raise Exception('empty answer to communication test') @@ -234,10 +246,12 @@ class Greco(BaseResource): @endpoint(perm='can_access') def status(self, request, idgreco, iddemande=None): - resp = self.get_client().service.consulter({ - 'idgreco': idgreco, - 'iddemande': iddemande, - }) + resp = self.get_client().service.consulter( + { + 'idgreco': idgreco, + 'iddemande': iddemande, + } + ) if resp is None: raise APIError('empty response from status()') return {'data': sudsobject_to_dict(resp)} @@ -255,8 +269,7 @@ class Greco(BaseResource): raise APIError('empty response from consulter()') return {'data': sudsobject_to_dict(resp)} - @endpoint(name='add-information', perm='can_access', - methods=['get', 'post', 'put', 'patch']) + @endpoint(name='add-information', perm='can_access', methods=['get', 'post', 'put', 'patch']) def add_information(self, request, iddemande=None, idgreco=None, information=None): if request.body: payload = json_loads(request.body) @@ -265,17 +278,18 @@ class Greco(BaseResource): idgreco = payload.get('idgreco') or idgreco iddemande = payload.get('iddemande') or iddemande information = payload.get('information') or information - resp = self.get_client().service.ajouterComplementInformation({ - 'idgreco': idgreco, - 'iddemande': iddemande, - 'complementInfo': information, - }) + resp = self.get_client().service.ajouterComplementInformation( + { + 'idgreco': idgreco, + 'iddemande': iddemande, + 'complementInfo': information, + } + ) if resp is None: raise APIError('empty response from ajouterComplementInformation()') return {'data': sudsobject_to_dict(resp)} - @endpoint(perm='can_access', - methods=['get', 'post', 'put', 'patch']) + @endpoint(perm='can_access', methods=['get', 'post', 'put', 
'patch']) def update(self, request, iddemande=None, idgreco=None, comment=None): if request.body: payload = json_loads(request.body) @@ -284,11 +298,13 @@ class Greco(BaseResource): idgreco = payload.get('idgreco') or idgreco iddemande = payload.get('iddemande') or iddemande comment = payload.get('comment') or comment - resp = self.get_client().service.relancer({ - 'idgreco': idgreco, - 'iddemande': iddemande, - 'commentaire': comment, - }) + resp = self.get_client().service.relancer( + { + 'idgreco': idgreco, + 'iddemande': iddemande, + 'commentaire': comment, + } + ) if resp is None: raise APIError('empty response from relancer()') return {'data': sudsobject_to_dict(resp)} diff --git a/passerelle/contrib/grenoble_gru/migrations/0001_initial.py b/passerelle/contrib/grenoble_gru/migrations/0001_initial.py index e296faf2..d40d0320 100644 --- a/passerelle/contrib/grenoble_gru/migrations/0001_initial.py +++ b/passerelle/contrib/grenoble_gru/migrations/0001_initial.py @@ -14,15 +14,46 @@ class Migration(migrations.Migration): migrations.CreateModel( name='GrenobleGRU', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('base_url', models.URLField(help_text='Grenoble GRU API base URL', max_length=256, verbose_name='Base URL')), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + 
(b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'base_url', + models.URLField( + help_text='Grenoble GRU API base URL', max_length=256, verbose_name='Base URL' + ), + ), ('username', models.CharField(max_length=128, verbose_name='Username')), ('password', models.CharField(max_length=128, verbose_name='Password')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_grenoblegru_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_grenoblegru_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Grenoble - Gestion des signalements', diff --git a/passerelle/contrib/grenoble_gru/models.py b/passerelle/contrib/grenoble_gru/models.py index c0b4c33b..1a566a54 100644 --- a/passerelle/contrib/grenoble_gru/models.py +++ b/passerelle/contrib/grenoble_gru/models.py @@ -39,7 +39,7 @@ RESPONSE_CODES = { '20': _('Invalid input format'), '21': _('Required field not provided'), '22': _('Unexpected value (referentials)'), - '23': _('Demand already exists') + '23': _('Demand already exists'), } @@ -63,9 +63,9 @@ def check_value(data, field_name, values): class GrenobleGRU(BaseResource): - base_url = models.URLField(max_length=256, blank=False, - verbose_name=_('Base URL'), - help_text=_('Grenoble GRU API base URL')) + base_url = models.URLField( + max_length=256, blank=False, verbose_name=_('Base URL'), help_text=_('Grenoble GRU API base URL') + ) username = models.CharField(max_length=128, verbose_name=_('Username')) password = models.CharField(max_length=128, verbose_name=_('Password')) @@ -92,37 +92,50 @@ class GrenobleGRU(BaseResource): def build_gru_params(self, data): types_params = {} - payload = { - 'id': data['application_id'] - } + payload = {'id': data['application_id']} payload['dem_comp'] = types_params['dem_comp'] = data.get('dem_comp', 'Voirie') - 
payload.update({ - # applicant informations - 'dem_nom': data['applicant_lastname'], - 'dem_prenom': data['applicant_firstname'], - 'dem_tel': data['applicant_phone'], - 'dem_mail': data['applicant_email'], - 'dem_reponse': 1 if data.get('applicant_requires_reply') is True else 0, - 'dem_moyen_contact': check_value(data, 'applicant_contact_mode', self.types('//modeContact', True, types_params)), - 'dem_nature': check_value(data, 'applicant_status', self.types('//natureContact', True, types_params)), - - # intervention informations - 'int_type_adresse': check_value(data, 'intervention_address_type', self.types('//typeAdresse', True, types_params)), - 'int_numerovoie': data['intervention_street_number'], - 'int_libellevoie': data['intervention_street_name'], - 'int_insee': data['intervention_address_insee'], - 'int_secteur': check_value(data, 'intervention_sector', self.types('//secteur', True, types_params)), - 'int_type_numero': check_value(data, 'intervention_number_type', self.types('//typeNumero', True, types_params)), - 'int_date_demande': dateparse.parse_datetime(data['intervention_datetime']).strftime('%d%m%Y %H:%M'), - - # comments - 'obs_demande_urgente': 1 if data.get('urgent_demand') in (True, 'True', 1, '1') else 0, - 'obs_type_dysfonctionnement': check_value( - data, 'dysfonction_type', self.types('//typeDysfonctionnement', True, types_params)), - 'obs_motif': check_value(data, 'intervention_reason', self.types('//motif', True, types_params)), - 'obs_description_probleme': data.get('comment_description', ''), - }) + payload.update( + { + # applicant informations + 'dem_nom': data['applicant_lastname'], + 'dem_prenom': data['applicant_firstname'], + 'dem_tel': data['applicant_phone'], + 'dem_mail': data['applicant_email'], + 'dem_reponse': 1 if data.get('applicant_requires_reply') is True else 0, + 'dem_moyen_contact': check_value( + data, 'applicant_contact_mode', self.types('//modeContact', True, types_params) + ), + 'dem_nature': check_value( + data, 
'applicant_status', self.types('//natureContact', True, types_params) + ), + # intervention informations + 'int_type_adresse': check_value( + data, 'intervention_address_type', self.types('//typeAdresse', True, types_params) + ), + 'int_numerovoie': data['intervention_street_number'], + 'int_libellevoie': data['intervention_street_name'], + 'int_insee': data['intervention_address_insee'], + 'int_secteur': check_value( + data, 'intervention_sector', self.types('//secteur', True, types_params) + ), + 'int_type_numero': check_value( + data, 'intervention_number_type', self.types('//typeNumero', True, types_params) + ), + 'int_date_demande': dateparse.parse_datetime(data['intervention_datetime']).strftime( + '%d%m%Y %H:%M' + ), + # comments + 'obs_demande_urgente': 1 if data.get('urgent_demand') in (True, 'True', 1, '1') else 0, + 'obs_type_dysfonctionnement': check_value( + data, 'dysfonction_type', self.types('//typeDysfonctionnement', True, types_params) + ), + 'obs_motif': check_value( + data, 'intervention_reason', self.types('//motif', True, types_params) + ), + 'obs_description_probleme': data.get('comment_description', ''), + } + ) if data['intervention_reason'] == '24': # code for reason 'Autre' in which case it should be specified payload['obs_motifautre'] = data.get('intervention_custom_reason', '') @@ -156,10 +169,8 @@ class GrenobleGRU(BaseResource): return [el.find('identifiant').text for el in root.xpath(path)] return { 'data': [ - { - 'id': el.find('identifiant').text, - 'text': el.find('libelle').text - } for el in root.xpath(path) + {'id': el.find('identifiant').text, 'text': el.find('libelle').text} + for el in root.xpath(path) ] } @@ -187,7 +198,9 @@ class GrenobleGRU(BaseResource): def dysfunction_types(self, request, *args, **kwargs): return self.types('//typeDysfonctionnement', params=kwargs) - @endpoint(name='intervention-descriptions', perm='can_access', description=_('Lists intervention descriptions')) + @endpoint( + 
name='intervention-descriptions', perm='can_access', description=_('Lists intervention descriptions') + ) def intervention_descriptions(self, request, *args, **kwargs): return self.types('//descIntervention', params=kwargs) @@ -215,8 +228,13 @@ class GrenobleGRU(BaseResource): raise APIError(RESPONSE_CODES.get(response.text, _('Unknown error code (%s)') % response.text)) return {'data': 'Demand successfully created'} - @endpoint(name='demand', perm='can_access', methods=['post'], description=_('Add attachment to a demand'), - pattern=r'(?P<demand_id>[\w-]+)/add-attachment/$',) + @endpoint( + name='demand', + perm='can_access', + methods=['post'], + description=_('Add attachment to a demand'), + pattern=r'(?P<demand_id>[\w-]+)/add-attachment/$', + ) def add_attachment_to_demand(self, request, demand_id, **kwargs): data = json_loads(request.body) if 'file' not in data: raise WrongParameter(['file'], []) @@ -231,9 +249,11 @@ class GrenobleGRU(BaseResource): if 'content' not in file_data: raise WrongParameter(['file[content]'], []) # file data should be ordered - file_data = (('filetype', file_data['content_type']), - ('filename', file_data['filename']), - ('filecontent', file_data['content'])) + file_data = ( + ('filetype', file_data['content_type']), + ('filename', file_data['filename']), + ('filecontent', file_data['content']), + ) # file parameters should be urlencoded and sent as 'piece_jointe' param payload = {'dem_tiers_id': demand_id, 'piece_jointe': urlencode(file_data)} response = self.request('ws_update_demandePJ.php', payload) @@ -241,8 +261,9 @@ return True return False - @endpoint(name='demand', perm='can_access', description=_('Get demand'), - pattern=r'(?P<demand_id>[\w-]+)/$') + @endpoint( + name='demand', perm='can_access', description=_('Get demand'), pattern=r'(?P<demand_id>[\w-]+)/$' + ) def get_demand(self, request, demand_id, **kwargs): payload = {'dem_tiers_id': demand_id} response = self.request('ws_get_demande.php', payload) diff --git 
a/passerelle/contrib/iparapheur/migrations/0001_initial.py b/passerelle/contrib/iparapheur/migrations/0001_initial.py index a75ea669..c3c10c99 100644 --- a/passerelle/contrib/iparapheur/migrations/0001_initial.py +++ b/passerelle/contrib/iparapheur/migrations/0001_initial.py @@ -14,16 +14,39 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Management', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), ('wsdl_url', models.CharField(help_text='WSDL URL', max_length=128, verbose_name='WSDL URL')), - ('verify_cert', models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity')), + ( + 'verify_cert', + models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity'), + ), ('username', models.CharField(max_length=128, verbose_name='Username', blank=True)), ('password', models.CharField(max_length=128, verbose_name='Password', blank=True)), - ('keystore', models.FileField(help_text='Certificate and private key in PEM format', upload_to=b'iparapheur', null=True, verbose_name='Keystore', blank=True)), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_management_users_+', related_query_name='+', blank=True)), + ( + 'keystore', + models.FileField( + help_text='Certificate and private key in PEM format', + upload_to=b'iparapheur', + null=True, + verbose_name='Keystore', + blank=True, + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_management_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'i-Parapheur', diff --git 
a/passerelle/contrib/iparapheur/migrations/0003_iparapheur_log_level.py b/passerelle/contrib/iparapheur/migrations/0003_iparapheur_log_level.py index 31d7904e..7a46889a 100644 --- a/passerelle/contrib/iparapheur/migrations/0003_iparapheur_log_level.py +++ b/passerelle/contrib/iparapheur/migrations/0003_iparapheur_log_level.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AddField( model_name='iparapheur', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/contrib/iparapheur/migrations/0006_use_http_resource.py b/passerelle/contrib/iparapheur/migrations/0006_use_http_resource.py index 851c08a6..143efcf5 100644 --- a/passerelle/contrib/iparapheur/migrations/0006_use_http_resource.py +++ b/passerelle/contrib/iparapheur/migrations/0006_use_http_resource.py @@ -40,7 +40,9 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='iparapheur', name='client_certificate', - field=models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate'), + field=models.FileField( + blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate' + ), ), migrations.AddField( model_name='iparapheur', diff --git a/passerelle/contrib/iparapheur/migrations/0007_iparapheur_wsdl_endpoint_location.py b/passerelle/contrib/iparapheur/migrations/0007_iparapheur_wsdl_endpoint_location.py index 873dc5cb..b7efab55 100644 --- 
a/passerelle/contrib/iparapheur/migrations/0007_iparapheur_wsdl_endpoint_location.py +++ b/passerelle/contrib/iparapheur/migrations/0007_iparapheur_wsdl_endpoint_location.py @@ -15,6 +15,11 @@ class Migration(migrations.Migration): migrations.AddField( model_name='iparapheur', name='wsdl_endpoint_location', - field=models.CharField(blank=True, help_text='override WSDL endpoint location', max_length=256, verbose_name='WSDL endpoint location'), + field=models.CharField( + blank=True, + help_text='override WSDL endpoint location', + max_length=256, + verbose_name='WSDL endpoint location', + ), ), ] diff --git a/passerelle/contrib/iparapheur/models.py b/passerelle/contrib/iparapheur/models.py index ae4c6d9e..fa5a1a02 100644 --- a/passerelle/contrib/iparapheur/models.py +++ b/passerelle/contrib/iparapheur/models.py @@ -46,8 +46,8 @@ CREATE_FILE_SCHEMA = { }, 'content_type': { 'type': 'string', - } - } + }, + }, }, 'title': { 'type': 'string', @@ -63,14 +63,15 @@ CREATE_FILE_SCHEMA = { }, 'visibility': { 'type': 'string', - } - } + }, + }, } def format_type(t): return {'id': force_text(t), 'text': force_text(t)} + def format_file(f): return {'status': f.status, 'id': f.nom, 'timestamp': f.timestamp} @@ -84,12 +85,15 @@ class FileNotFoundError(Exception): class IParapheur(BaseResource, HTTPResource): - wsdl_url = models.CharField(max_length=128, blank=False, - verbose_name=_('WSDL URL'), - help_text=_('WSDL URL')) - wsdl_endpoint_location = models.CharField(max_length=256, blank=True, - verbose_name=_('WSDL endpoint location'), - help_text=_('override WSDL endpoint location')) + wsdl_url = models.CharField( + max_length=128, blank=False, verbose_name=_('WSDL URL'), help_text=_('WSDL URL') + ) + wsdl_endpoint_location = models.CharField( + max_length=256, + blank=True, + verbose_name=_('WSDL endpoint location'), + help_text=_('override WSDL endpoint location'), + ) category = _('Business Process Connectors') class Meta: @@ -108,7 +112,8 @@ class IParapheur(BaseResource, 
HTTPResource): soap_client.overridden_service = soap_client.create_service( # picks the first binding in the WSDL as the default list(soap_client.wsdl.bindings.keys())[0], - self.wsdl_endpoint_location) + self.wsdl_endpoint_location, + ) else: soap_client.overridden_service = soap_client.service return soap_client @@ -160,15 +165,12 @@ class IParapheur(BaseResource, HTTPResource): return {'data': [format_file(f) for f in self.call('RechercherDossiers')]} @endpoint( - perm='can_access', name='create-file', + perm='can_access', + name='create-file', post={ 'description': _('Create file'), - 'request_body': { - 'schema': { - 'application/json': CREATE_FILE_SCHEMA - } - } - } + 'request_body': {'schema': {'application/json': CREATE_FILE_SCHEMA}}, + }, ) def create_file(self, request, post_data): try: @@ -184,13 +186,14 @@ class IParapheur(BaseResource, HTTPResource): doc_type = soap_client.get_type('ns0:TypeDoc') doc = doc_type(content, content_type) generated_dossier_id = slugify(post_data['title']) - parameters = {'TypeTechnique': post_data['type'], - 'DossierID': generated_dossier_id, - 'DossierTitre': post_data['title'], - 'SousType': post_data['subtype'], - 'Visibilite': post_data['visibility'], - 'DocumentPrincipal': doc, - } + parameters = { + 'TypeTechnique': post_data['type'], + 'DossierID': generated_dossier_id, + 'DossierTitre': post_data['title'], + 'SousType': post_data['subtype'], + 'Visibilite': post_data['visibility'], + 'DocumentPrincipal': doc, + } if 'email' in post_data: parameters['EmailEmetteur'] = post_data['email'] try: @@ -241,11 +244,13 @@ class IParapheur(BaseResource, HTTPResource): if not filename: raise FileError('File title not found.') - response = HttpResponse(document['_value_1'], - content_type=document['contentType']) + response = HttpResponse(document['_value_1'], content_type=document['contentType']) ascii_filename = force_text(filename.encode('ascii', 'replace')) encoded_filename = parse.quote(force_text(filename.encode('utf-8')), 
safe='') - response['Content-Disposition'] = 'inline; filename=%s; filename*=UTF-8\'\'%s' % (ascii_filename, encoded_filename) + response['Content-Disposition'] = 'inline; filename=%s; filename*=UTF-8\'\'%s' % ( + ascii_filename, + encoded_filename, + ) return response @endpoint(perm='can_access', name='get-file-status', pattern='(?P[\w-]+)') @@ -258,7 +263,11 @@ class IParapheur(BaseResource, HTTPResource): raise Http404(resp.MessageRetour.message) raise FileError(resp.MessageRetour.message) last = resp.LogDossier[-1] - return {'data': { - 'annotation': last.annotation, 'nom': last.nom, - 'status': last.status, 'timestamp': last.timestamp - }} + return { + 'data': { + 'annotation': last.annotation, + 'nom': last.nom, + 'status': last.status, + 'timestamp': last.timestamp, + } + } diff --git a/passerelle/contrib/isere_ens/migrations/0001_initial.py b/passerelle/contrib/isere_ens/migrations/0001_initial.py index 69536a2d..a9ad9410 100644 --- a/passerelle/contrib/isere_ens/migrations/0001_initial.py +++ b/passerelle/contrib/isere_ens/migrations/0001_initial.py @@ -17,19 +17,56 @@ class Migration(migrations.Migration): migrations.CreateModel( name='IsereENS', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('basic_auth_username', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication username')), - ('basic_auth_password', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication password')), - ('client_certificate', models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS client certificate')), - ('trusted_certificate_authorities', 
models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs')), + ( + 'basic_auth_username', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication username' + ), + ), + ( + 'basic_auth_password', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication password' + ), + ), + ( + 'client_certificate', + models.FileField( + blank=True, null=True, upload_to='', verbose_name='TLS client certificate' + ), + ), + ( + 'trusted_certificate_authorities', + models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'), + ), ('verify_cert', models.BooleanField(default=True, verbose_name='TLS verify certificates')), - ('http_proxy', models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy')), - ('base_url', models.URLField(help_text='Base API URL (before /api/...)', verbose_name='Webservice Base URL')), + ( + 'http_proxy', + models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'), + ), + ( + 'base_url', + models.URLField( + help_text='Base API URL (before /api/...)', verbose_name='Webservice Base URL' + ), + ), ('token', models.CharField(max_length=128, verbose_name='Access token')), - ('users', models.ManyToManyField(blank=True, related_name='_isereens_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_isereens_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Espaces naturels sensibles du CD38', diff --git a/passerelle/contrib/isere_ens/models.py b/passerelle/contrib/isere_ens/models.py index d80f2936..2c02d647 100644 --- a/passerelle/contrib/isere_ens/models.py +++ b/passerelle/contrib/isere_ens/models.py @@ -180,9 +180,7 @@ class IsereENS(BaseResource, HTTPResource): url = urlparse.urljoin(self.base_url, endpoint) headers = {"token": self.token} if json is not None: - response = self.requests.post( - 
url, params=params, json=json, headers=headers - ) + response = self.requests.post(url, params=params, json=json, headers=headers) else: response = self.requests.get(url, params=params, headers=headers) @@ -219,9 +217,7 @@ class IsereENS(BaseResource, HTTPResource): "description": _("Returns site with code=id"), }, "kind": { - "description": _( - "Returns only sites of this kind (school_group or social)" - ), + "description": _("Returns only sites of this kind (school_group or social)"), }, }, ) @@ -266,17 +262,13 @@ class IsereENS(BaseResource, HTTPResource): animators = self.request("api/1.0.0/schoolAnimator") for animator in animators: animator["id"] = str(animator["id"]) - animator["text"] = ( - "%(first_name)s %(last_name)s <%(email)s> (%(entity)s)" % animator - ) + animator["text"] = "%(first_name)s %(last_name)s <%(email)s> (%(entity)s)" % animator cache.set(cache_key, animators, 300) if id is not None: animators = [animator for animator in animators if animator["id"] == id] if q is not None: q = simplify(q) - animators = [ - animator for animator in animators if q in simplify(animator["text"]) - ] + animators = [animator for animator in animators if q in simplify(animator["text"])] return {"data": animators} @endpoint( @@ -290,9 +282,7 @@ class IsereENS(BaseResource, HTTPResource): "description": _("Number of participants"), }, "start_date": { - "description": _( - "First date of the calendar (format: YYYY-MM-DD, default: today)" - ), + "description": _("First date of the calendar (format: YYYY-MM-DD, default: today)"), }, "end_date": { "description": _( @@ -301,9 +291,7 @@ class IsereENS(BaseResource, HTTPResource): }, }, ) - def site_calendar( - self, request, site, participants="1", start_date=None, end_date=None - ): + def site_calendar(self, request, site, participants="1", start_date=None, end_date=None): if start_date: try: start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d").date() @@ -364,9 +352,7 @@ class IsereENS(BaseResource, 
HTTPResource): else: date["status"] = "partially-open" date["details"] = ( - _( - "Morning (%(morning_status)s), Lunch (%(lunch_status)s), Afternoon (%(afternoon_status)s)" - ) + _("Morning (%(morning_status)s), Lunch (%(lunch_status)s), Afternoon (%(afternoon_status)s)") % date ) date["text"] = "%(date_format)s - %(details)s" % date diff --git a/passerelle/contrib/iws/migrations/0001_initial.py b/passerelle/contrib/iws/migrations/0001_initial.py index f4ab00f5..5ee835bb 100644 --- a/passerelle/contrib/iws/migrations/0001_initial.py +++ b/passerelle/contrib/iws/migrations/0001_initial.py @@ -14,17 +14,60 @@ class Migration(migrations.Migration): migrations.CreateModel( name='IWSConnector', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('wsdl_url', models.URLField(help_text='URL of the SOAP wsdl endpoint', max_length=400, verbose_name='SOAP wsdl endpoint')), - ('operation_endpoint', models.URLField(help_text='URL of SOAP operation endpoint', max_length=400, verbose_name='SOAP operation endpoint')), + ( + 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'wsdl_url', + models.URLField( + help_text='URL of the SOAP wsdl endpoint', + 
max_length=400, + verbose_name='SOAP wsdl endpoint', + ), + ), + ( + 'operation_endpoint', + models.URLField( + help_text='URL of SOAP operation endpoint', + max_length=400, + verbose_name='SOAP operation endpoint', + ), + ), ('username', models.CharField(max_length=128, verbose_name='Service username')), - ('password', models.CharField(max_length=128, null=True, verbose_name='Service password', blank=True)), + ( + 'password', + models.CharField(max_length=128, null=True, verbose_name='Service password', blank=True), + ), ('database', models.CharField(max_length=128, verbose_name='Service database')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_iwsconnector_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_iwsconnector_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'IWS connector', diff --git a/passerelle/contrib/iws/models.py b/passerelle/contrib/iws/models.py index 68d5897b..5ed3dbdb 100644 --- a/passerelle/contrib/iws/models.py +++ b/passerelle/contrib/iws/models.py @@ -77,20 +77,21 @@ BOOKDATE_SCHEMA = { "sms": { "description": "Send sms to user before the booked date", }, - } + }, } class IWSConnector(BaseResource): wsdl_url = models.URLField( - max_length=400, verbose_name=_('SOAP wsdl endpoint'), - help_text=_('URL of the SOAP wsdl endpoint')) + max_length=400, verbose_name=_('SOAP wsdl endpoint'), help_text=_('URL of the SOAP wsdl endpoint') + ) operation_endpoint = models.URLField( - max_length=400, verbose_name=_('SOAP operation endpoint'), - help_text=_('URL of SOAP operation endpoint')) + max_length=400, + verbose_name=_('SOAP operation endpoint'), + help_text=_('URL of SOAP operation endpoint'), + ) username = models.CharField(max_length=128, verbose_name=_('Service username')) - password = models.CharField( - max_length=128, verbose_name=_('Service password'), null=True, blank=True) + password = 
models.CharField(max_length=128, verbose_name=_('Service password'), null=True, blank=True) database = models.CharField(max_length=128, verbose_name=_('Service database')) category = _('Business Process Connectors') @@ -100,8 +101,7 @@ class IWSConnector(BaseResource): def _soap_call(self, iws_data, method): client = self.soap_client() header = client.get_element('%sIsiWsAuthHeader' % NS) - header_value = header( - IsiLogin=self.username, IsiPassword=self.password, IsiDataBaseID=self.database) + header_value = header(IsiLogin=self.username, IsiPassword=self.password, IsiDataBaseID=self.database) client.set_default_soapheaders([header_value]) service = client.create_service('%sIsiHelpDeskServiceSoap' % NS, self.operation_endpoint) @@ -117,18 +117,16 @@ class IWSConnector(BaseResource): ws_entity = IsiWsEntity(IsiFields=soap_list) iws_res = getattr(service, method)(ws_entity) - schema_root = lxml.etree.parse(pkg_resources.resource_stream( - 'passerelle.contrib.iws', 'xsd/ReponseWS.xsd')) + schema_root = lxml.etree.parse( + pkg_resources.resource_stream('passerelle.contrib.iws', 'xsd/ReponseWS.xsd') + ) schema = lxml.etree.XMLSchema(schema_root) parser = lxml.etree.XMLParser(schema=schema, encoding='utf-8') try: tree = lxml.etree.fromstring(iws_res.encode('utf-8'), parser).getroottree() except lxml.etree.XMLSyntaxError: raise APIError("IWS response is not valid") - result = { - "status": tree.find('//Statut').text, - "trace": tree.find('//Trace').text - } + result = {"status": tree.find('//Statut').text, "trace": tree.find('//Trace').text} fields = {} for data_field in tree.xpath('//IsiWsDataField'): fields[data_field.find('IsiField').text] = data_field.find('IsiValue').text @@ -141,34 +139,23 @@ class IWSConnector(BaseResource): raise APIError('iws error, status: "%(status)s", trace: "%(trace)s"' % iws_res) @endpoint( - methods=['get'], perm='can_access', example_pattern='{sti_code}/{request_type}/{volume}/', + methods=['get'], + perm='can_access', + 
example_pattern='{sti_code}/{request_type}/{volume}/', pattern='^(?P[0-9]{16}.?)/(?P\w+)/(?P[0-9]+)/$', parameters={ - 'sti_code': { - 'description': _('Address STI code'), 'example_value': '3155570464130003' - }, - 'request_type': { - 'description': _('DECHET or ENCOMBRANT'), - 'example_value': 'DECHET' - }, - 'volume': { - 'description': _('Volume of waste'), - 'example_value': '1' - }, - 'city': { - 'description': _('City'), - 'example_value': 'TOULOUSE' - }, - 'session_id': { - 'description': _('Session identifier'), - 'example_value': '7a896f464ede7b4e' - }, + 'sti_code': {'description': _('Address STI code'), 'example_value': '3155570464130003'}, + 'request_type': {'description': _('DECHET or ENCOMBRANT'), 'example_value': 'DECHET'}, + 'volume': {'description': _('Volume of waste'), 'example_value': '1'}, + 'city': {'description': _('City'), 'example_value': 'TOULOUSE'}, + 'session_id': {'description': _('Session identifier'), 'example_value': '7a896f464ede7b4e'}, 'syndic': { 'description': _('Syndic'), 'example_value': 'true', 'type': 'bool', }, - }, cache_duration=120 + }, + cache_duration=120, ) def checkdate(self, request, sti_code, request_type, volume, city, session_id, syndic=False): if request_type not in ('DECHET', 'ENCOMBRANT'): @@ -187,7 +174,7 @@ class IWSConnector(BaseResource): 'C_EQUIPE': "VPVIGIE", 'I_APP_DEMANDEUR': 'booking, demandeur', 'I_AP_ADRESSEMAIL': 'booking@localhost', - 'C_TYPEPB': SYNDIC_C_TYPEPB[request_type] if syndic else C_TYPEPB[request_type] + 'C_TYPEPB': SYNDIC_C_TYPEPB[request_type] if syndic else C_TYPEPB[request_type], } iws_res = self._soap_call(iws_data, 'IsiAddAndGetCall') self._check_status(iws_res) @@ -208,15 +195,10 @@ class IWSConnector(BaseResource): dates.append({"id": raw_date, "text": date_text, "token": token}) return result - @endpoint(perm='can_access', - post={ - 'description': _('Blah'), - 'request_body': { - 'schema': { - 'application/json': BOOKDATE_SCHEMA - } - } - }) + @endpoint( + perm='can_access', + 
post={'description': _('Blah'), 'request_body': {'schema': {'application/json': BOOKDATE_SCHEMA}}}, + ) def bookdate(self, request, post_data): data = post_data iws_data = { diff --git a/passerelle/contrib/lille_kimoce/migrations/0001_initial.py b/passerelle/contrib/lille_kimoce/migrations/0001_initial.py index 3c8194ed..4138fa31 100644 --- a/passerelle/contrib/lille_kimoce/migrations/0001_initial.py +++ b/passerelle/contrib/lille_kimoce/migrations/0001_initial.py @@ -17,11 +17,17 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Kimoce', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('base_url', models.URLField(help_text='API base URL', max_length=256, verbose_name='Base URL')), + ( + 'base_url', + models.URLField(help_text='API base URL', max_length=256, verbose_name='Base URL'), + ), ('username', models.CharField(max_length=128, verbose_name='Username')), ('password', models.CharField(max_length=128, verbose_name='Password')), ('users', models.ManyToManyField(blank=True, to='base.ApiUser')), diff --git a/passerelle/contrib/lille_kimoce/models.py b/passerelle/contrib/lille_kimoce/models.py index 73933882..3e57697f 100644 --- a/passerelle/contrib/lille_kimoce/models.py +++ b/passerelle/contrib/lille_kimoce/models.py @@ -98,15 +98,15 @@ DEMAND_SCHEMA = { 'comment': { 'description': 'demand comment', 'type': 'string', - } - } + }, + }, } class Kimoce(BaseResource): - base_url = models.URLField(max_length=256, blank=False, - verbose_name=_('Base URL'), - help_text=_('API base URL')) + base_url = models.URLField( + max_length=256, blank=False, verbose_name=_('Base URL'), 
help_text=_('API base URL') + ) username = models.CharField(max_length=128, verbose_name=_('Username')) password = models.CharField(max_length=128, verbose_name=_('Password')) @@ -121,8 +121,7 @@ class Kimoce(BaseResource): def check_status(self): url = urljoin(self.base_url, 'login_check') - response = self.requests.post(url, json={'username': self.username, - 'password': self.password}) + response = self.requests.post(url, json={'username': self.username, 'password': self.password}) response.raise_for_status() def get_token(self, renew=False): @@ -130,8 +129,7 @@ class Kimoce(BaseResource): if not renew and cache.get(token_key): return cache.get(token_key) url = urljoin(self.base_url, 'login_check') - response = self.requests.post(url, json={'username': self.username, - 'password': self.password}) + response = self.requests.post(url, json={'username': self.username, 'password': self.password}) if not response.status_code // 100 == 2: raise APIError(response.content) token = response.json()['token'] @@ -141,16 +139,14 @@ class Kimoce(BaseResource): def get_referential(self, endpoint, params=None): url = urljoin(self.base_url, endpoint) data = [] - response = self.requests.get(url, params=params, - auth=HttpBearerAuth(self.get_token())) + response = self.requests.get(url, params=params, auth=HttpBearerAuth(self.get_token())) if response.status_code == 401: - response = self.requests.get(url, params=params, - auth=HttpBearerAuth(self.get_token(renew=True))) + response = self.requests.get(url, params=params, auth=HttpBearerAuth(self.get_token(renew=True))) if response.status_code // 100 == 2: for member in response.json()['hydra:member']: member['number'] = member['id'] member['text'] = member['label'] - member['id'] = member['@id'] + member['id'] = member['@id'] data.append(member) return {'data': data} @@ -178,8 +174,7 @@ class Kimoce(BaseResource): # parentId is a flag to filter street names only response = self.requests.get(url, params=params, 
auth=HttpBearerAuth(self.get_token())) if response.status_code == 401: - response = self.requests.get(url, params=params, - auth=HttpBearerAuth(self.get_token(renew=True))) + response = self.requests.get(url, params=params, auth=HttpBearerAuth(self.get_token(renew=True))) if response.status_code // 100 == 2: for street in response.json()['hydra:member']: street['number'] = street['@id'] @@ -188,42 +183,48 @@ class Kimoce(BaseResource): data.append(street) return {'data': data} - @endpoint(perm='can_access', description=_('Create demand'), post={ - 'description': _('Create demand into KIMOCE'), - 'request_body': { - 'schema': { - 'application/json': DEMAND_SCHEMA - } - } - }) + @endpoint( + perm='can_access', + description=_('Create demand'), + post={ + 'description': _('Create demand into KIMOCE'), + 'request_body': {'schema': {'application/json': DEMAND_SCHEMA}}, + }, + ) def create_demand(self, request, post_data): - payload = {'category': post_data['category'], - 'type': post_data['type'], - 'subType': post_data['subtype'], - 'priorityId': post_data.get('priorityId', 3), - 'companyLocation': { - 'number': post_data.get('street_number', ''), - 'road': post_data.get('street_name', ''), - 'city': post_data.get('city', ''), - 'zipCode': post_data.get('zipcode', ''), - }, - 'sourceContact': {'firstname': post_data['first_name'], - 'lastname': post_data['last_name'], - 'mail': post_data['email']}, - 'pictures': [], - 'GRUResponseLink': post_data['form_url'] + payload = { + 'category': post_data['category'], + 'type': post_data['type'], + 'subType': post_data['subtype'], + 'priorityId': post_data.get('priorityId', 3), + 'companyLocation': { + 'number': post_data.get('street_number', ''), + 'road': post_data.get('street_name', ''), + 'city': post_data.get('city', ''), + 'zipCode': post_data.get('zipcode', ''), + }, + 'sourceContact': { + 'firstname': post_data['first_name'], + 'lastname': post_data['last_name'], + 'mail': post_data['email'], + }, + 'pictures': [], + 
'GRUResponseLink': post_data['form_url'], } if post_data.get('lat') and post_data.get('lon'): - payload['coordinate'] = {'latitude': post_data['lat'], - 'longitude': post_data['lon']} + payload['coordinate'] = {'latitude': post_data['lat'], 'longitude': post_data['lon']} for param_name in ('picture1', 'picture2'): - if post_data.get(param_name) and isinstance(post_data[param_name], dict) and post_data[param_name].get('content'): + if ( + post_data.get(param_name) + and isinstance(post_data[param_name], dict) + and post_data[param_name].get('content') + ): payload['pictures'].append({'content': post_data[param_name]['content']}) if post_data.get('comment'): payload['comment'] = {'content': post_data['comment']} url = urljoin(self.base_url, 'demands') result = self.requests.post(url, json=payload, auth=HttpBearerAuth(self.get_token())) - if result.status_code == 401: + if result.status_code == 401: result = self.requests.post(url, json=payload, auth=HttpBearerAuth(self.get_token(renew=True))) if result.status_code // 100 == 2: return {'data': result.json()} diff --git a/passerelle/contrib/lille_urban_card/migrations/0001_initial.py b/passerelle/contrib/lille_urban_card/migrations/0001_initial.py index d7be2661..46544426 100644 --- a/passerelle/contrib/lille_urban_card/migrations/0001_initial.py +++ b/passerelle/contrib/lille_urban_card/migrations/0001_initial.py @@ -17,14 +17,28 @@ class Migration(migrations.Migration): migrations.CreateModel( name='LilleUrbanCard', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('base_url', models.URLField(help_text='API base URL', max_length=256, verbose_name='Base 
URL')), + ( + 'base_url', + models.URLField(help_text='API base URL', max_length=256, verbose_name='Base URL'), + ), ('username', models.CharField(max_length=128, verbose_name='Username')), ('password', models.CharField(max_length=128, verbose_name='Password')), - ('users', models.ManyToManyField(blank=True, related_name='_lilleurbancard_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_lilleurbancard_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Lille Urban Card', diff --git a/passerelle/contrib/lille_urban_card/models.py b/passerelle/contrib/lille_urban_card/models.py index 095c284e..fff63b22 100644 --- a/passerelle/contrib/lille_urban_card/models.py +++ b/passerelle/contrib/lille_urban_card/models.py @@ -34,13 +34,12 @@ class TokenError(APIError): class LilleUrbanCard(BaseResource): - base_url = models.URLField(max_length=256, blank=False, - verbose_name=_('Base URL'), - help_text=_('API base URL')) + base_url = models.URLField( + max_length=256, blank=False, verbose_name=_('Base URL'), help_text=_('API base URL') + ) username = models.CharField(max_length=128, verbose_name=_('Username')) password = models.CharField(max_length=128, verbose_name=_('Password')) - category = 'Lille' class Meta: @@ -55,8 +54,9 @@ class LilleUrbanCard(BaseResource): def get_token(self): response = self.requests.post( - urljoin(self.base_url, '/clu/ws/auth/connexion'), - json={'login': self.username, 'password': self.password}).json() + urljoin(self.base_url, '/clu/ws/auth/connexion'), + json={'login': self.username, 'password': self.password}, + ).json() if response.get('erreur'): self.logger.error('error getting token (%r)', response['erreur']) raise TokenError(response['erreur']) @@ -64,21 +64,23 @@ class LilleUrbanCard(BaseResource): @endpoint(description=_('List of socioprofessional categories')) def csp(self, request, *args, **kwargs): - return {'data': 
[ - {'id': '2', 'text': "Commerçant·e, chef·ffe d’entreprise"}, - {'id': '3', 'text': "Cadre, profession libérale ou intellectuel·le"}, - {'id': '4', 'text': "Profession intermédiaire"}, - {'id': '5', 'text': "Employé·e"}, - {'id': '6', 'text': "Ouvrier·e"}, - {'id': '1', 'text': "Agriculteur·rice"}, - {'id': '8', 'text': "Sans profession"}, - {'id': '81', 'text': "Demandeur·se d’emploi"}, - {'id': '82', 'text': "Enfant de 0 à 11 ans"}, - {'id': '83', 'text': "Enfant de plus de 12 ans"}, - {'id': '84', 'text': "Étudiant·e"}, - {'id': '7', 'text': "Retraité·e"}, - {'id': '99', 'text': "Ne souhaite pas se prononcer"}, - ]} + return { + 'data': [ + {'id': '2', 'text': "Commerçant·e, chef·ffe d’entreprise"}, + {'id': '3', 'text': "Cadre, profession libérale ou intellectuel·le"}, + {'id': '4', 'text': "Profession intermédiaire"}, + {'id': '5', 'text': "Employé·e"}, + {'id': '6', 'text': "Ouvrier·e"}, + {'id': '1', 'text': "Agriculteur·rice"}, + {'id': '8', 'text': "Sans profession"}, + {'id': '81', 'text': "Demandeur·se d’emploi"}, + {'id': '82', 'text': "Enfant de 0 à 11 ans"}, + {'id': '83', 'text': "Enfant de plus de 12 ans"}, + {'id': '84', 'text': "Étudiant·e"}, + {'id': '7', 'text': "Retraité·e"}, + {'id': '99', 'text': "Ne souhaite pas se prononcer"}, + ] + } def preprocess_contact_data(self, data): if data.get('telephone'): @@ -102,9 +104,12 @@ class LilleUrbanCard(BaseResource): for kind_of_optional_field in ('nom_naissance', 'telephone', 'complement_numero_voie'): if not data.get(kind_of_optional_field): data[kind_of_optional_field] = '' - for boolean_field in ('recevoir_journal_senior', 'recevoir_msg_info_senior', - 'acceptation_reg_int', 'acceptation_reg_int_resp_legal', - ): + for boolean_field in ( + 'recevoir_journal_senior', + 'recevoir_msg_info_senior', + 'acceptation_reg_int', + 'acceptation_reg_int_resp_legal', + ): if data.get(boolean_field) == 'Oui': data[boolean_field] = 1 else: @@ -134,9 +139,8 @@ class LilleUrbanCard(BaseResource): 
self.preprocess_contact_data(data) self.preprocess_service_data(data) response = self.requests.post( - urljoin(self.base_url, '/clu/ws/demanderCarte'), - json=data, - auth=HttpBearerAuth(self.get_token())) + urljoin(self.base_url, '/clu/ws/demanderCarte'), json=data, auth=HttpBearerAuth(self.get_token()) + ) response_json = response.json() if not isinstance(response_json, dict): self.logger.error('error requesting card (unknown response format)') @@ -147,17 +151,20 @@ class LilleUrbanCard(BaseResource): raise APIError(response_json['erreur'], data=response_json) return {'data': response_json} # {"n_demande_clu":10000005} - @endpoint(perm='can_access', - description=_('Get status of card request'), - parameters={ - 'n_demande_clu': { - 'description': _('Request number'), - } - }) + @endpoint( + perm='can_access', + description=_('Get status of card request'), + parameters={ + 'n_demande_clu': { + 'description': _('Request number'), + } + }, + ) def card_status(self, request, n_demande_clu): response = self.requests.get( - urljoin(self.base_url, '/clu/ws/consulterDemande/%s' % n_demande_clu), - auth=HttpBearerAuth(self.get_token())).json() + urljoin(self.base_url, '/clu/ws/consulterDemande/%s' % n_demande_clu), + auth=HttpBearerAuth(self.get_token()), + ).json() return {'data': response} @endpoint(perm='can_access', description=_('Add new subscriptions'), methods=['post']) @@ -170,9 +177,10 @@ class LilleUrbanCard(BaseResource): # remove attributes that are forbidden by this API del data[attribute] response = self.requests.post( - urljoin(self.base_url, '/clu/ws/ajouterAbonnements'), - json=data, - auth=HttpBearerAuth(self.get_token())) + urljoin(self.base_url, '/clu/ws/ajouterAbonnements'), + json=data, + auth=HttpBearerAuth(self.get_token()), + ) response_json = response.json() if not isinstance(response_json, dict): self.logger.error('error adding subscriptions (unknown response format)') @@ -187,9 +195,10 @@ class LilleUrbanCard(BaseResource): def 
code_change(self, request, *args, **kwargs): data = json_loads(request.body) response = self.requests.post( - urljoin(self.base_url, '/clu/ws/modifierCodeSecret'), - json=data, - auth=HttpBearerAuth(self.get_token())) + urljoin(self.base_url, '/clu/ws/modifierCodeSecret'), + json=data, + auth=HttpBearerAuth(self.get_token()), + ) response_json = response.json() if not isinstance(response_json, dict): self.logger.error('error changing code (unknown response format)') @@ -206,9 +215,8 @@ class LilleUrbanCard(BaseResource): if 'password' in data: data['code_secret'] = data.pop('password') response = self.requests.get( - urljoin(self.base_url, '/clu/ws/verifierMdp'), - data=data, - auth=HttpBearerAuth(self.get_token())) + urljoin(self.base_url, '/clu/ws/verifierMdp'), data=data, auth=HttpBearerAuth(self.get_token()) + ) response_json = response.json() if not isinstance(response_json, dict): self.logger.error('error checking code (unknown response format)') @@ -222,16 +230,20 @@ class LilleUrbanCard(BaseResource): raise APIError(response_json[error_attribute], data=response_json) raise APIError('invalid response', data=response_json) - @endpoint(perm='can_access', description=_('Get Card Info'), - parameters={ - 'numero_serie': { - 'description': _('Serial Number'), - } - }) + @endpoint( + perm='can_access', + description=_('Get Card Info'), + parameters={ + 'numero_serie': { + 'description': _('Serial Number'), + } + }, + ) def card_info(self, request, numero_serie, **kwargs): response = self.requests.get( - urljoin(self.base_url, '/clu/ws/consulterCarte?numero_serie=%s' % numero_serie), - auth=HttpBearerAuth(self.get_token())) + urljoin(self.base_url, '/clu/ws/consulterCarte?numero_serie=%s' % numero_serie), + auth=HttpBearerAuth(self.get_token()), + ) response_json = response.json() if not isinstance(response_json, dict): self.logger.error('error getting card info (unknown response format)') @@ -251,9 +263,8 @@ class LilleUrbanCard(BaseResource): data = 
json_loads(request.body) self.preprocess_contact_data(data) response = self.requests.post( - urljoin(self.base_url, '/clu/ws/revoquerCarte'), - json=data, - auth=HttpBearerAuth(self.get_token())) + urljoin(self.base_url, '/clu/ws/revoquerCarte'), json=data, auth=HttpBearerAuth(self.get_token()) + ) response_json = response.json() if not isinstance(response_json, dict): self.logger.error('error revoking card (unknown response format)') @@ -270,9 +281,10 @@ class LilleUrbanCard(BaseResource): self.preprocess_contact_data(data) self.preprocess_service_data(data) response = self.requests.post( - urljoin(self.base_url, '/clu/ws/revoquerAbonnement'), - json=data, - auth=HttpBearerAuth(self.get_token())) + urljoin(self.base_url, '/clu/ws/revoquerAbonnement'), + json=data, + auth=HttpBearerAuth(self.get_token()), + ) response_json = response.json() if not isinstance(response_json, dict): self.logger.error('error revoking subscripton (unknown response format)') diff --git a/passerelle/contrib/mdph13/admin.py b/passerelle/contrib/mdph13/admin.py index b98629d7..a1eb5ee1 100644 --- a/passerelle/contrib/mdph13/admin.py +++ b/passerelle/contrib/mdph13/admin.py @@ -8,4 +8,5 @@ class LinkAdmin(admin.ModelAdmin): search_fields = ['display_name', 'file_number', 'dob'] list_display = ['id', 'created', 'resource', 'name_id', 'file_number'] + admin.site.register(Link, LinkAdmin) diff --git a/passerelle/contrib/mdph13/migrations/0001_initial.py b/passerelle/contrib/mdph13/migrations/0001_initial.py index 38a66529..9db03fef 100644 --- a/passerelle/contrib/mdph13/migrations/0001_initial.py +++ b/passerelle/contrib/mdph13/migrations/0001_initial.py @@ -18,7 +18,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Link', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name_id', 
models.CharField(max_length=256, verbose_name='NameID')), ('file_number', models.CharField(max_length=64, verbose_name='MDPH beneficiary file number')), ('secret', models.CharField(max_length=64, verbose_name='MDPH beneficiary secret')), @@ -32,18 +35,47 @@ class Migration(migrations.Migration): migrations.CreateModel( name='MDPH13Resource', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), - ('basic_auth_username', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication username')), - ('basic_auth_password', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication password')), - ('client_certificate', models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate')), - ('trusted_certificate_authorities', models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS trusted CAs')), + ( + 'basic_auth_username', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication username' + ), + ), + ( + 'basic_auth_password', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication password' + ), + ), + ( + 'client_certificate', + models.FileField( + blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate' + ), + ), + ( + 'trusted_certificate_authorities', + models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS trusted CAs'), + ), ('verify_cert', models.BooleanField(default=True, verbose_name='TLS verify certificates')), - ('http_proxy', models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy')), + ( + 'http_proxy', 
+ models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'), + ), ('webservice_base_url', models.URLField(verbose_name='Webservice Base URL')), - ('users', models.ManyToManyField(blank=True, related_name='_link_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_link_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'MDPH CD13', diff --git a/passerelle/contrib/mdph13/models.py b/passerelle/contrib/mdph13/models.py index f3858030..8bb01a95 100644 --- a/passerelle/contrib/mdph13/models.py +++ b/passerelle/contrib/mdph13/models.py @@ -33,8 +33,8 @@ from passerelle.base.models import BaseResource, HTTPResource def json_walker(value, func, path=None): - '''Walk a JSON structure of objects, arrays and scalar values, call - func(value, path) on values.''' + """Walk a JSON structure of objects, arrays and scalar values, call + func(value, path) on values.""" path = path or [] if isinstance(value, dict): for key in value: @@ -45,6 +45,7 @@ def json_walker(value, func, path=None): else: func(value, path) + ERROR_MAPPING = { 'dossier-inconnu': 'dossier-inconnu', 'secret-invalide': 'secret-invalide', @@ -77,9 +78,7 @@ class MDPH13Resource(BaseResource, HTTPResource): raise requests.RequestException('JSON expected', response=response) if content.get('err') != 0: - err_desc = ERROR_MAPPING.get( - content.get('err_code'), - 'err != 0: missing or unknown error code') + err_desc = ERROR_MAPPING.get(content.get('err_code'), 'err != 0: missing or unknown error code') raise APIError(err_desc, data=content) response.raise_for_status() return content @@ -141,18 +140,19 @@ class MDPH13Resource(BaseResource, HTTPResource): if not all(isinstance(demande.get('typologie'), six.text_type) for demande in demandes): raise APIError('typologie-must-be-a-string', data=content) if not all(demande['typologie'].lower() in typologies for demande in demandes): - 
unknowns = set([demande['typologie'].lower() for demande in demandes]) - set(typologies.keys()) - raise APIError('typologie-is-unknown', - data={ - 'unknowns': list(unknowns), - 'choices': typologies.keys(), - 'response': content, - }) + unknowns = set([demande['typologie'].lower() for demande in demandes]) - set( + typologies.keys() + ) + raise APIError( + 'typologie-is-unknown', + data={ + 'unknowns': list(unknowns), + 'choices': typologies.keys(), + 'response': content, + }, + ) for demande in demandes: - new_demandes.setdefault( - typologies[demande['typologie'].lower()], - [] - ).append(demande) + new_demandes.setdefault(typologies[demande['typologie'].lower()], []).append(demande) data['demandes'] = new_demandes # Check some syntaxes @@ -160,45 +160,47 @@ class MDPH13Resource(BaseResource, HTTPResource): def check(value, path): if path[-1].startswith('date_'): - if (isinstance(value, six.text_type) - and not self.DATE_RE.match(value)): + if isinstance(value, six.text_type) and not self.DATE_RE.match(value): errors.append('%s is not a date string' % '.'.join(path)) + json_walker(data, check) if errors: raise APIError('invalid-response-format', data={'errors': errors, 'response': content}) return data - @endpoint(name='link', - methods=['post'], - description=_('Create link with an extranet account'), - perm='can_access', - parameters={ - 'NameID': { - 'description': _('Publik NameID'), - 'example_value': 'xyz24d934', - }, - 'numero_dossier': { - 'description': _('MDPH13 beneficiary file number'), - 'example_value': '1234', - }, - 'secret': { - 'description': _('MDPH13 beneficiary secret'), - 'example_value': 'secret', - }, - 'date_de_naissance': { - 'description': _('MDPH13 beneficiary date of birth'), - 'example_value': '1992-03-05', - }, - 'email': { - 'description': _('Publik known email'), - 'example_value': 'john.doe@example.com', - }, - 'ip': { - 'description': _('Publik client IP'), - 'example_value': '88.67.23.45', - }, - }) + @endpoint( + name='link', 
+ methods=['post'], + description=_('Create link with an extranet account'), + perm='can_access', + parameters={ + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + }, + 'numero_dossier': { + 'description': _('MDPH13 beneficiary file number'), + 'example_value': '1234', + }, + 'secret': { + 'description': _('MDPH13 beneficiary secret'), + 'example_value': 'secret', + }, + 'date_de_naissance': { + 'description': _('MDPH13 beneficiary date of birth'), + 'example_value': '1992-03-05', + }, + 'email': { + 'description': _('Publik known email'), + 'example_value': 'john.doe@example.com', + }, + 'ip': { + 'description': _('Publik client IP'), + 'example_value': '88.67.23.45', + }, + }, + ) def link(self, request, NameID, numero_dossier, secret, date_de_naissance, email, ip=None): file_number = numero_dossier.strip() try: @@ -213,29 +215,26 @@ class MDPH13Resource(BaseResource, HTTPResource): if not self.EMAIL_RE.match(email): raise APIError('email is not valid', http_status=400) link, created, updated = Link.create_or_update( - resource=self, - NameID=NameID, - file_number=file_number, - secret=secret, - dob=dob, - email=email, - ip=ip) + resource=self, NameID=NameID, file_number=file_number, secret=secret, dob=dob, email=email, ip=ip + ) return {'link_id': link.pk, 'created': created, 'updated': updated} - @endpoint(name='unlink', - methods=['post', 'delete'], - description=_('Delete link with an extranet account'), - perm='can_access', - parameters={ - 'NameID': { - 'description': _('Publik NameID'), - 'example_value': 'xyz24d934', - }, - 'link_id': { - 'description': _('Identifier of the link'), - 'example_value': '1', - }, - }) + @endpoint( + name='unlink', + methods=['post', 'delete'], + description=_('Delete link with an extranet account'), + perm='can_access', + parameters={ + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + }, + 'link_id': { + 'description': _('Identifier of the link'), + 
'example_value': '1', + }, + }, + ) def unlink(self, request, NameID, link_id): qs = Link.objects.filter(resource=self, name_id=NameID) if link_id == 'all': @@ -250,35 +249,35 @@ class MDPH13Resource(BaseResource, HTTPResource): qs.delete() return {'deleted': count} - @endpoint(name='dossiers', - description=_('Get datas for all links, or for a specified one'), - perm='can_access', - parameters={ - 'NameID': { - 'description': _('Publik NameID'), - 'example_value': 'xyz24d934', - }, - 'email': { - 'description': _('Publik known email'), - 'example_value': 'john.doe@example.com', - }, - 'link_id': { - 'description': _('Link identifier'), - 'example_value': '1', - }, - 'ip': { - 'description': _('Publik client IP'), - 'example_value': '88.67.23.45', - }, - }) + @endpoint( + name='dossiers', + description=_('Get datas for all links, or for a specified one'), + perm='can_access', + parameters={ + 'NameID': { + 'description': _('Publik NameID'), + 'example_value': 'xyz24d934', + }, + 'email': { + 'description': _('Publik known email'), + 'example_value': 'john.doe@example.com', + }, + 'link_id': { + 'description': _('Link identifier'), + 'example_value': '1', + }, + 'ip': { + 'description': _('Publik client IP'), + 'example_value': '88.67.23.45', + }, + }, + ) def dossiers(self, request, NameID, email, link_id=None, ip=None): email = email.strip() if not self.EMAIL_RE.match(email): raise APIError('email is not valid', http_status=400) - qs = Link.objects.filter( - resource=self, - name_id=NameID) + qs = Link.objects.filter(resource=self, name_id=NameID) if link_id: try: link_id = int(link_id) @@ -309,36 +308,19 @@ class MDPH13Resource(BaseResource, HTTPResource): @six.python_2_unicode_compatible class Link(models.Model): - resource = models.ForeignKey( - MDPH13Resource, - on_delete=models.CASCADE) - name_id = models.CharField( - verbose_name=_('NameID'), - max_length=256) - file_number = models.CharField( - max_length=64, - verbose_name=_('MDPH beneficiary file 
number')) - secret = models.CharField( - verbose_name=_('MDPH beneficiary secret'), - max_length=64) - dob = models.DateField( - verbose_name=_('MDPH beneficiary date of birth')) - created = models.DateTimeField( - verbose_name=_('Creation date'), - auto_now_add=True) - display_name = models.CharField( - verbose_name=_('Display name'), - max_length=128, - blank=True) + resource = models.ForeignKey(MDPH13Resource, on_delete=models.CASCADE) + name_id = models.CharField(verbose_name=_('NameID'), max_length=256) + file_number = models.CharField(max_length=64, verbose_name=_('MDPH beneficiary file number')) + secret = models.CharField(verbose_name=_('MDPH beneficiary secret'), max_length=64) + dob = models.DateField(verbose_name=_('MDPH beneficiary date of birth')) + created = models.DateTimeField(verbose_name=_('Creation date'), auto_now_add=True) + display_name = models.CharField(verbose_name=_('Display name'), max_length=128, blank=True) def get_file(self, email=None, ip=None): # email is necessary for audit purpose mdph_file = self.resource.call_situation_dossier( - file_number=self.file_number, - secret=self.secret, - dob=self.dob, - email=email, - ip=ip) + file_number=self.file_number, secret=self.secret, dob=self.dob, email=email, ip=ip + ) display_name = self._make_display_name(mdph_file) if self.display_name != display_name: self.display_name = display_name @@ -349,11 +331,8 @@ class Link(models.Model): def create_or_update(self, resource, NameID, file_number, secret, dob, email=None, ip=None): # email is necessary for audit purpose mdph_file = resource.call_situation_dossier( - file_number=file_number, - secret=secret, - dob=dob, - email=email, - ip=ip) + file_number=file_number, secret=secret, dob=dob, email=email, ip=ip + ) display_name = self._make_display_name(mdph_file) with transaction.atomic(): @@ -365,7 +344,8 @@ class Link(models.Model): 'secret': secret, 'dob': dob, 'display_name': display_name, - }) + }, + ) updated = False if link.secret != secret 
or link.dob != dob or link.display_name != display_name: @@ -394,6 +374,8 @@ class Link(models.Model): class Meta: unique_together = ( - 'resource', 'name_id', 'file_number', + 'resource', + 'name_id', + 'file_number', ) ordering = ['file_number'] diff --git a/passerelle/contrib/nancypoll/migrations/0001_initial.py b/passerelle/contrib/nancypoll/migrations/0001_initial.py index 80b8724d..af6a87b1 100644 --- a/passerelle/contrib/nancypoll/migrations/0001_initial.py +++ b/passerelle/contrib/nancypoll/migrations/0001_initial.py @@ -14,13 +14,39 @@ class Migration(migrations.Migration): migrations.CreateModel( name='NancyPoll', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('csv_file', models.FileField(upload_to=b'csv', verbose_name='CSV File')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_nancypoll_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_nancypoll_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'NancyPoll', diff --git 
a/passerelle/contrib/nancypoll/migrations/0004_csv_upload_to.py b/passerelle/contrib/nancypoll/migrations/0004_csv_upload_to.py index 43cfb37f..76240f88 100644 --- a/passerelle/contrib/nancypoll/migrations/0004_csv_upload_to.py +++ b/passerelle/contrib/nancypoll/migrations/0004_csv_upload_to.py @@ -15,6 +15,8 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='nancypoll', name='csv_file', - field=models.FileField(upload_to=passerelle.contrib.nancypoll.models.upload_to, verbose_name='CSV File'), + field=models.FileField( + upload_to=passerelle.contrib.nancypoll.models.upload_to, verbose_name='CSV File' + ), ), ] diff --git a/passerelle/contrib/nancypoll/models.py b/passerelle/contrib/nancypoll/models.py index 5185fda6..5911fd61 100644 --- a/passerelle/contrib/nancypoll/models.py +++ b/passerelle/contrib/nancypoll/models.py @@ -11,7 +11,9 @@ from passerelle.utils.api import endpoint from passerelle.utils.jsonresponse import APIError -COLUMN_NAMES = 'street_start_number, street_end_number,,,street_side,,,,code,id,text,address,,,street_name,,canton,,,' +COLUMN_NAMES = ( + 'street_start_number, street_end_number,,,street_side,,,,code,id,text,address,,,street_name,,canton,,,' +) def to_unicode(value): @@ -75,13 +77,15 @@ class NancyPoll(BaseResource): if row[idx_side] == 'P' and int(street_no) % 2 == 1: continue - return {'data': { - 'id': row[titles.index('id')], - 'text': row[titles.index('text')], - 'code': row[titles.index('code')], - 'address': row[titles.index('address')], - 'canton': row[titles.index('canton')], - }} + return { + 'data': { + 'id': row[titles.index('id')], + 'text': row[titles.index('text')], + 'code': row[titles.index('code')], + 'address': row[titles.index('address')], + 'canton': row[titles.index('canton')], + } + } raise APIError('Polling Station Not Found') diff --git a/passerelle/contrib/planitech/migrations/0001_initial.py b/passerelle/contrib/planitech/migrations/0001_initial.py index dd832e02..3902a866 100644 --- 
a/passerelle/contrib/planitech/migrations/0001_initial.py +++ b/passerelle/contrib/planitech/migrations/0001_initial.py @@ -17,15 +17,39 @@ class Migration(migrations.Migration): migrations.CreateModel( name='PlanitechConnector', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(unique=True)), - ('url', models.URLField(help_text='URL of the Planitech API endpoint', max_length=400, verbose_name='Planitech API endpoint')), + ( + 'url', + models.URLField( + help_text='URL of the Planitech API endpoint', + max_length=400, + verbose_name='Planitech API endpoint', + ), + ), ('username', models.CharField(max_length=128, verbose_name='Service username')), - ('password', models.CharField(blank=True, max_length=128, null=True, verbose_name='Service password')), - ('verify_cert', models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity')), - ('users', models.ManyToManyField(blank=True, related_name='_planitechconnector_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'password', + models.CharField(blank=True, max_length=128, null=True, verbose_name='Service password'), + ), + ( + 'verify_cert', + models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity'), + ), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_planitechconnector_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Planitech', diff --git a/passerelle/contrib/planitech/migrations/0002_planitechconnector_custom_fields.py b/passerelle/contrib/planitech/migrations/0002_planitechconnector_custom_fields.py index 59ccb98a..322505ee 100644 --- 
a/passerelle/contrib/planitech/migrations/0002_planitechconnector_custom_fields.py +++ b/passerelle/contrib/planitech/migrations/0002_planitechconnector_custom_fields.py @@ -16,6 +16,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='planitechconnector', name='custom_fields', - field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True, verbose_name='Custom places fields'), + field=django.contrib.postgres.fields.jsonb.JSONField( + blank=True, null=True, verbose_name='Custom places fields' + ), ), ] diff --git a/passerelle/contrib/planitech/migrations/0003_pairing.py b/passerelle/contrib/planitech/migrations/0003_pairing.py index d800e9b9..d900410c 100644 --- a/passerelle/contrib/planitech/migrations/0003_pairing.py +++ b/passerelle/contrib/planitech/migrations/0003_pairing.py @@ -16,11 +16,19 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Pairing', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('name_id', models.CharField(max_length=256)), ('external_id', models.CharField(max_length=256)), ('created', models.DateTimeField(auto_now_add=True)), - ('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='planitech.PlanitechConnector')), + ( + 'resource', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='planitech.PlanitechConnector' + ), + ), ], ), migrations.AlterUniqueTogether( diff --git a/passerelle/contrib/planitech/migrations/0005_auto_20200504_1402.py b/passerelle/contrib/planitech/migrations/0005_auto_20200504_1402.py index ae39c3b3..9b3433c9 100644 --- a/passerelle/contrib/planitech/migrations/0005_auto_20200504_1402.py +++ b/passerelle/contrib/planitech/migrations/0005_auto_20200504_1402.py @@ -19,6 +19,10 @@ class Migration(migrations.Migration): migrations.AlterField( 
model_name='planitechconnector', name='url', - field=models.URLField(help_text='URL of the Planitec API endpoint', max_length=400, verbose_name='Planitec API endpoint'), + field=models.URLField( + help_text='URL of the Planitec API endpoint', + max_length=400, + verbose_name='Planitec API endpoint', + ), ), ] diff --git a/passerelle/contrib/planitech/models.py b/passerelle/contrib/planitech/models.py index 0f4482e0..253eb2d2 100644 --- a/passerelle/contrib/planitech/models.py +++ b/passerelle/contrib/planitech/models.py @@ -47,8 +47,19 @@ CREATE_RESERVATION_SCHEMA = { "description": "", "type": "object", "required": [ - "date", "start_time", "end_time", "place_id", "price", "name_id", "first_name", - "last_name", "email", "activity_id", "object", "type_id", "vat_rate" + "date", + "start_time", + "end_time", + "place_id", + "price", + "name_id", + "first_name", + "last_name", + "email", + "activity_id", + "object", + "type_id", + "vat_rate", ], "properties": { "date": { @@ -106,8 +117,8 @@ CREATE_RESERVATION_SCHEMA = { "price_code": { "description": "User price code", "type": "string", - } - } + }, + }, } @@ -117,8 +128,16 @@ GET_RESERVATION_PRICE_SCHEMA = { "description": "", "type": "object", "required": [ - "date", "start_time", "end_time", "place_id", "name_id", "first_name", "last_name", - "email", "activity_id", "type_id" + "date", + "start_time", + "end_time", + "place_id", + "name_id", + "first_name", + "last_name", + "email", + "activity_id", + "type_id", ], "properties": { "date": { @@ -164,14 +183,12 @@ GET_RESERVATION_PRICE_SCHEMA = { "price_code": { "description": "User price code", "type": "string", - } - } + }, + }, } -RESERVATION_STATUS = { - "confirmed": 3, "invalid": 0, " pre-reservation": 1, "standard": 2 -} +RESERVATION_STATUS = {"confirmed": 3, "invalid": 0, " pre-reservation": 1, "standard": 2} UPDATE_RESERVATION_SCHEMA = { "$schema": "http://json-schema.org/draft-04/schema#", @@ -187,9 +204,9 @@ UPDATE_RESERVATION_SCHEMA = { "status": { 
"description": "Status of the reservation", "type": "string", - "enum": list(RESERVATION_STATUS.keys()) - } - } + "enum": list(RESERVATION_STATUS.keys()), + }, + }, } @@ -258,24 +275,22 @@ def get_extensions(post_data): for key in ('name', 'value'): if key not in extension: raise APIError("Missing '%s' in extension" % key) - res.append({ - 'name': extension['name'], - 'value': extension['value'], - 'type': extension.get('type', 'string') - }) + res.append( + {'name': extension['name'], 'value': extension['value'], 'type': extension.get('type', 'string')} + ) return res class PlanitechConnector(BaseResource): url = models.URLField( - max_length=400, verbose_name=_('Planitec API endpoint'), - help_text=_('URL of the Planitec API endpoint')) + max_length=400, + verbose_name=_('Planitec API endpoint'), + help_text=_('URL of the Planitec API endpoint'), + ) username = models.CharField(max_length=128, verbose_name=_('Service username')) - password = models.CharField( - max_length=128, verbose_name=_('Service password'), null=True, blank=True) - verify_cert = models.BooleanField( - default=True, verbose_name=_('Check HTTPS Certificate validity')) + password = models.CharField(max_length=128, verbose_name=_('Service password'), null=True, blank=True) + verify_cert = models.BooleanField(default=True, verbose_name=_('Check HTTPS Certificate validity')) custom_fields = JSONField(_('Custom places fields'), blank=True, null=True) price_code = models.CharField(max_length=128, verbose_name=_('Price code'), blank=True) @@ -319,30 +334,21 @@ class PlanitechConnector(BaseResource): ref = {} for place in data['placesList']: place_id = int(place['identifier']) - ref[place_id] = { - 'identifier': place_id, 'label': place['label'] - } + ref[place_id] = {'identifier': place_id, 'label': place['label']} - extensionAttributes = { - 'capacity': { - 'name': 'TOTCAP', - 'type': 'int' - } - } + extensionAttributes = {'capacity': {'name': 'TOTCAP', 'type': 'int'}} for custom_field in 
self._get_places_fields(): field_name = custom_field['name'] - extensionAttributes[field_name] = { - 'name': field_name, - 'type': custom_field['type'] - } + extensionAttributes[field_name] = {'name': field_name, 'type': custom_field['type']} data = self._call_planitech( - self.requests.post, 'getPlacesInfo', + self.requests.post, + 'getPlacesInfo', { "placeIdentifiers": [float(key) for key in ref.keys()], - "extensionAttributes": extensionAttributes - } + "extensionAttributes": extensionAttributes, + }, ) for place in data['requestedPlaces']: @@ -363,7 +369,7 @@ class PlanitechConnector(BaseResource): return ref def _get_places_referential( - self, min_capacity=DEFAULT_MIN_CAPACITY, max_capacity=DEFAULT_MAX_CAPACITY, **kwargs + self, min_capacity=DEFAULT_MIN_CAPACITY, max_capacity=DEFAULT_MAX_CAPACITY, **kwargs ): ref = self._raw_get_places_referential() @@ -422,8 +428,10 @@ class PlanitechConnector(BaseResource): with transaction.atomic(): pairing, created = Pairing.objects.get_or_create( - resource=self, name_id=post_data['name_id'], - defaults={'external_id': uuid.uuid4().hex, 'price_code': price_code}) + resource=self, + name_id=post_data['name_id'], + defaults={'external_id': uuid.uuid4().hex, 'price_code': price_code}, + ) if created: # Create planitec user params = { @@ -431,7 +439,7 @@ class PlanitechConnector(BaseResource): "name": post_data['last_name'], "firstName": post_data['first_name'], "mail": post_data['email'], - "pricingCode": price_code + "pricingCode": price_code, } data = self._call_planitech(self.requests.post, 'createPerson', params) if data.get('creationStatus') != 'OK': @@ -441,10 +449,7 @@ class PlanitechConnector(BaseResource): # Update planitec user pairing.price_code = dyn_price_code pairing.save() - params = { - 'externalUserIdentifier': pairing.external_id, - 'pricingCode': dyn_price_code - } + params = {'externalUserIdentifier': pairing.external_id, 'pricingCode': dyn_price_code} data = self._call_planitech(self.requests.post, 
'updatePerson', params) if data.get('modificationStatus') != 'OK': raise APIError("Person update failed: %s" % data.get('modificationStatus')) @@ -455,12 +460,8 @@ class PlanitechConnector(BaseResource): perm='can_access', post={ 'description': _('Get reservation price'), - 'request_body': { - 'schema': { - 'application/json': GET_RESERVATION_PRICE_SCHEMA - } - } - } + 'request_body': {'schema': {'application/json': GET_RESERVATION_PRICE_SCHEMA}}, + }, ) def getreservationprice(self, request, post_data): start_datetime = combine_date_time(post_data['date'], post_data['start_time']) @@ -483,23 +484,14 @@ class PlanitechConnector(BaseResource): price = data.get('calculatedPrice', False) if price is False: raise APIError("Get reservation price failed: no price") - return { - 'data': { - 'price': int(price), - 'raw_data': data - } - } + return {'data': {'price': int(price), 'raw_data': data}} @endpoint( perm='can_access', post={ 'description': _('Create reservation'), - 'request_body': { - 'schema': { - 'application/json': CREATE_RESERVATION_SCHEMA - } - } - } + 'request_body': {'schema': {'application/json': CREATE_RESERVATION_SCHEMA}}, + }, ) def createreservation(self, request, post_data): start_datetime = combine_date_time(post_data['date'], post_data['start_time']) @@ -519,7 +511,7 @@ class PlanitechConnector(BaseResource): "requestDate": request_date, "start": start_datetime, "typeID": mste.Uint32(post_data['type_id']), - "vatRate": mste.Uint32(post_data['vat_rate']) + "vatRate": mste.Uint32(post_data['vat_rate']), } extensions = get_extensions(post_data) if extensions: @@ -531,12 +523,7 @@ class PlanitechConnector(BaseResource): reservation_id = data.get('reservationIdentifier') if not reservation_id: raise APIError("Reservation creation failed: no reservation ID") - return { - 'data': { - 'reservation_id': int(reservation_id), - 'raw_data': data - } - } + return {'data': {'reservation_id': int(reservation_id), 'raw_data': data}} def hourly(self): 
self._raw_get_places_referential(refresh_cache=True) @@ -552,9 +539,7 @@ class PlanitechConnector(BaseResource): for date_obj in available_dates: date_text = dateformat.format(date_obj, 'l d F Y') short_text = dateformat.format(date_obj, 'd/m/Y') - res.append({ - "id": date_obj.isoformat(), "text": date_text, - "short_text": short_text}) + res.append({"id": date_obj.isoformat(), "text": date_text, "short_text": short_text}) return res def _place_display(self, raw_data): @@ -569,18 +554,11 @@ class PlanitechConnector(BaseResource): def _full_display(self, raw_data, places_id): places_ref = self._raw_get_places_referential() - res = { - 'date': self._date_display(raw_data), - 'place': self._place_display(raw_data) - } + res = {'date': self._date_display(raw_data), 'place': self._place_display(raw_data)} all_dates = [d['id'] for d in res['date']] full = [] for place_id in places_id: - place_data = { - 'id': place_id, - 'text': places_ref[place_id]['label'], - 'dates': [] - } + place_data = {'id': place_id, 'text': places_ref[place_id]['label'], 'dates': []} place_dates = [] for place in raw_data.get('availablePlaces', []): if place_id == int(place['placeIdentifier']): @@ -598,7 +576,8 @@ class PlanitechConnector(BaseResource): @endpoint( description_get=_('Get days available for reservation'), - methods=['get'], perm='can_access', + methods=['get'], + perm='can_access', parameters={ 'min_capacity': { 'description': _('Minimum capacity'), @@ -633,8 +612,9 @@ class PlanitechConnector(BaseResource): 'example_value': '10', }, 'weekdays': { - 'description': _('Week days, comma separated list of integers beetween' - ' 0 (sunday) and 6 (saturday)'), + 'description': _( + 'Week days, comma separated list of integers beetween' ' 0 (sunday) and 6 (saturday)' + ), 'example_value': 'true', 'type': 'string', }, @@ -647,11 +627,24 @@ class PlanitechConnector(BaseResource): 'description': _('Display'), 'example_value': 'date', }, - }) + }, + ) def getfreegaps( - self, request, 
display, start_time, end_time, min_capacity=DEFAULT_MIN_CAPACITY, start_date=None, - start_days=None, end_date=None, end_days=None, max_capacity=DEFAULT_MAX_CAPACITY, weekdays=None, - place_id=None, **kwargs): + self, + request, + display, + start_time, + end_time, + min_capacity=DEFAULT_MIN_CAPACITY, + start_date=None, + start_days=None, + end_date=None, + end_days=None, + max_capacity=DEFAULT_MAX_CAPACITY, + weekdays=None, + place_id=None, + **kwargs, + ): # Additional parameters check valid_displays = ['date', 'place', 'full'] @@ -683,14 +676,15 @@ class PlanitechConnector(BaseResource): places_id = [int(place_id)] else: places_id = self._get_places_referential( - min_capacity=min_capacity, max_capacity=max_capacity, **kwargs).keys() + min_capacity=min_capacity, max_capacity=max_capacity, **kwargs + ).keys() params = { "placeIdentifiers": [float(p_id) for p_id in places_id], "startingDate": utc_start_datetime, "endingDate": utc_end_datetime, "requestedStartingTime": float(0), - "requestedEndingTime": duration + "requestedEndingTime": duration, } if weekdays is not None: @@ -702,8 +696,7 @@ class PlanitechConnector(BaseResource): raise ValueError() reservation_days.append(mste.Uint32(day)) except (ValueError, TypeError): - raise APIError( - 'weekdays must be a comma separated list of integers beetween 0 and 6') + raise APIError('weekdays must be a comma separated list of integers beetween 0 and 6') if reservation_days: params['reservationDays'] = reservation_days @@ -743,15 +736,11 @@ class PlanitechConnector(BaseResource): ref = self._raw_get_places_referential() if id_ not in ref: raise APIError('No place with ID %s' % id_) - return { - 'data': ref[int(id_)] - } + return {'data': ref[int(id_)]} @endpoint(description_get=_('Get places referential'), methods=['get'], perm='can_access') def getplacesreferential(self, request, **kwargs): - return { - 'data': self._get_places_referential(**kwargs) - } + return {'data': self._get_places_referential(**kwargs)} 
@endpoint(description_get=_('Get reservation infos'), methods=['get'], perm='can_access') def getreservationsinfo(self, request, reservation_id): @@ -779,20 +768,17 @@ class PlanitechConnector(BaseResource): return {'data': self.generic_call('getUsersList', 'usersList')} @endpoint( - methods=['post'], perm='can_access', + methods=['post'], + perm='can_access', post={ 'description': _('Update reservation'), - 'request_body': { - 'schema': { - 'application/json': UPDATE_RESERVATION_SCHEMA - } - } - } + 'request_body': {'schema': {'application/json': UPDATE_RESERVATION_SCHEMA}}, + }, ) def updatereservation(self, request, post_data): params = { "reservationIdentifier": mste.Uint32(post_data['reservation_id']), - "situation": mste.Uint32(RESERVATION_STATUS[post_data['status']]) + "situation": mste.Uint32(RESERVATION_STATUS[post_data['status']]), } extensions = get_extensions(post_data) if extensions: @@ -801,11 +787,7 @@ class PlanitechConnector(BaseResource): data = self._call_planitech(self.requests.post, 'updateReservation', params) if data.get('modificationStatus') != 'OK': raise APIError("Update reservation failed: %s" % data.get('modificationStatus')) - return { - 'data': { - 'raw_data': data - } - } + return {'data': {'raw_data': data}} def check_status(self): auth_url = urlparse.urljoin(self.url, 'auth') @@ -814,9 +796,11 @@ class PlanitechConnector(BaseResource): class Pairing(models.Model): - class Meta: - unique_together = (('resource', 'name_id'), ('resource', 'external_id'),) + unique_together = ( + ('resource', 'name_id'), + ('resource', 'external_id'), + ) resource = models.ForeignKey(PlanitechConnector, on_delete=models.CASCADE) name_id = models.CharField(blank=False, max_length=256) diff --git a/passerelle/contrib/planitech/mste.py b/passerelle/contrib/planitech/mste.py index a0397d92..e2679a5f 100644 --- a/passerelle/contrib/planitech/mste.py +++ b/passerelle/contrib/planitech/mste.py @@ -23,7 +23,7 @@ from django.utils import six ENCODE_TOKENS = { 
'integer': 16, 'real': 19, - 'nil': 0, + 'nil': 0, 'true': 1, 'false': 2, 'emptyString': 3, @@ -64,7 +64,6 @@ class Uint32(int): class MSTEDecoder(object): - def __init__(self, data): self._idx = 4 self._keys = [] @@ -153,9 +152,8 @@ class MSTEDecoder(object): class ObjectStore(list): - def add(self, obj): - """ Add object in the store + """Add object in the store and return its reference """ ref = self.getref(obj) @@ -165,7 +163,7 @@ class ObjectStore(list): return ref def getref(self, obj): - """ Return the reference of obj, + """Return the reference of obj, None if the object is not in the store """ try: @@ -175,7 +173,6 @@ class ObjectStore(list): class MSTEEncoder(object): - def __init__(self, data): self._data = data self._stream = [] diff --git a/passerelle/contrib/rsa13/migrations/0001_initial.py b/passerelle/contrib/rsa13/migrations/0001_initial.py index 9d890963..8c4a75eb 100644 --- a/passerelle/contrib/rsa13/migrations/0001_initial.py +++ b/passerelle/contrib/rsa13/migrations/0001_initial.py @@ -17,18 +17,50 @@ class Migration(migrations.Migration): migrations.CreateModel( name='RSA13Resource', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('basic_auth_username', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication username')), - ('basic_auth_password', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication password')), - ('client_certificate', models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS client certificate')), - ('trusted_certificate_authorities', models.FileField(blank=True, null=True, upload_to='', 
verbose_name='TLS trusted CAs')), + ( + 'basic_auth_username', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication username' + ), + ), + ( + 'basic_auth_password', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication password' + ), + ), + ( + 'client_certificate', + models.FileField( + blank=True, null=True, upload_to='', verbose_name='TLS client certificate' + ), + ), + ( + 'trusted_certificate_authorities', + models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'), + ), ('verify_cert', models.BooleanField(default=True, verbose_name='TLS verify certificates')), - ('http_proxy', models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy')), + ( + 'http_proxy', + models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'), + ), ('webservice_base_url', models.URLField(verbose_name='Webservice Base URL')), - ('users', models.ManyToManyField(blank=True, related_name='_rsa13resource_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_rsa13resource_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'RSA CD13', diff --git a/passerelle/contrib/rsa13/models.py b/passerelle/contrib/rsa13/models.py index 350bdf32..e0603919 100644 --- a/passerelle/contrib/rsa13/models.py +++ b/passerelle/contrib/rsa13/models.py @@ -357,6 +357,7 @@ class RSA13Resource(BaseResource, HTTPResource): return self.get('platform/%s/referent/' % platform_id, email=email, ip=ip) else: return self.post('platform/%s/referent/' % platform_id, email=email, ip=ip, json=post_data) + # BUG, methods and post are incompatible platform_referent.endpoint_info.methods.append('get') @@ -834,13 +835,11 @@ class RSA13Resource(BaseResource, HTTPResource): }, 'clos': OUI_NON_ENUM, }, - } + }, } ), ) - def platform_beneficiaire_action( - self, request, platform_id, 
beneficiary_id, email, ip=None - ): + def platform_beneficiaire_action(self, request, platform_id, beneficiary_id, email, ip=None): return self.get( 'platform/%s/beneficiaire/%s/action/' % (platform_id, beneficiary_id), email=email, @@ -950,7 +949,7 @@ class RSA13Resource(BaseResource, HTTPResource): 'properties': { 'montant': {'type': 'number'}, 'date': DATE_SCHEMA, - } + }, }, 'avis_pi': { 'type': 'object', @@ -958,14 +957,14 @@ class RSA13Resource(BaseResource, HTTPResource): 'montant': {'type': 'number'}, 'date': DATE_SCHEMA, 'avis': {'type': 'string'}, - } + }, }, 'avis_sai': { 'type': 'object', 'properties': { 'montant': {'type': 'number'}, 'date': DATE_SCHEMA, - } + }, }, 'clos': OUI_NON_ENUM, }, @@ -973,9 +972,7 @@ class RSA13Resource(BaseResource, HTTPResource): } ), ) - def platform_beneficiaire_fondsaide( - self, request, platform_id, beneficiary_id, email, ip=None - ): + def platform_beneficiaire_fondsaide(self, request, platform_id, beneficiary_id, email, ip=None): return self.get( 'platform/%s/beneficiaire/%s/fondsaide/' % (platform_id, beneficiary_id), email=email, @@ -1018,7 +1015,7 @@ class RSA13Resource(BaseResource, HTTPResource): "avis": {"type": "string"}, "date": DATE_SCHEMA, "montant": {"type": "number"}, - } + }, }, "budget": { "type": "object", @@ -1034,19 +1031,19 @@ class RSA13Resource(BaseResource, HTTPResource): "date_relance": DATE_SCHEMA, "num_versement": {"type": "integer"}, "reception": {"type": "string"}, - "type": {"type": "string"} - } - } + "type": {"type": "string"}, + }, + }, }, "nombre_versements": {"type": "integer"}, - } + }, }, "cloture": { "type": "object", "properties": { "date_cloture": DATE_SCHEMA, "date_relance": DATE_SCHEMA, - } + }, }, "code_tfi": {"type": "string"}, "decision_sai": { @@ -1054,15 +1051,15 @@ class RSA13Resource(BaseResource, HTTPResource): "properties": { "date": DATE_SCHEMA, "decision": {"type": "string"}, - "montant": {"type": "number"} - } + "montant": {"type": "number"}, + }, }, "demande": { 
"type": "object", "properties": { "date": DATE_SCHEMA, "montant": {"type": "number"}, - } + }, }, "id": {"type": "integer"}, "lib_tfi": {"type": "string"}, @@ -1072,10 +1069,10 @@ class RSA13Resource(BaseResource, HTTPResource): "date_decision": DATE_SCHEMA, "date_demande": DATE_SCHEMA, "decision": {"type": "string"}, - "montant": {"type": "string"} - } - } - } + "montant": {"type": "string"}, + }, + }, + }, } ), ) @@ -1128,9 +1125,7 @@ class RSA13Resource(BaseResource, HTTPResource): } ), ) - def platform_beneficiaire_affectation( - self, request, platform_id, beneficiary_id, email, ip=None - ): + def platform_beneficiaire_affectation(self, request, platform_id, beneficiary_id, email, ip=None): return self.get( 'platform/%s/beneficiaire/%s/affectation/' % (platform_id, beneficiary_id), email=email, @@ -1175,7 +1170,7 @@ class RSA13Resource(BaseResource, HTTPResource): 'properties': { 'nom': {'type': 'string'}, 'prenom': {'type': 'string'}, - } + }, }, 'code_pi': {'type': 'string'}, 'date_deb': DATE_SCHEMA, @@ -1206,7 +1201,7 @@ class RSA13Resource(BaseResource, HTTPResource): 'date_reelle': DATE_SCHEMA, 'resultat': {'type': 'string'}, 'lib_resultat': {'type': 'string'}, - } + }, }, 'fin': { 'type': 'object', @@ -1214,7 +1209,7 @@ class RSA13Resource(BaseResource, HTTPResource): 'date': DATE_SCHEMA, 'motif': {'type': 'string'}, 'lib_motif': {'type': 'string'}, - } + }, }, 'commentaire_ref': {'type': 'string'}, }, @@ -1232,9 +1227,7 @@ class RSA13Resource(BaseResource, HTTPResource): @endpoint( name='platform', - pattern=r'^(?P[0-9]{1,10})/' - r'beneficiaire/(?P[0-9]{1,10})/' - r'convo/$', + pattern=r'^(?P[0-9]{1,10})/' r'beneficiaire/(?P[0-9]{1,10})/' r'convo/$', example_pattern='{platform_id}/beneficiaire/{beneficiary_id}/convo/', description=_('Get beneficiary convocations'), perm='can_access', @@ -1265,7 +1258,7 @@ class RSA13Resource(BaseResource, HTTPResource): "properties": { "nombre": {"type": "integer"}, "motif": {"type": "string"}, - } + }, }, }, 
"derniere_consequence": { @@ -1273,16 +1266,14 @@ class RSA13Resource(BaseResource, HTTPResource): "properties": { "date": DATE_SCHEMA, "consequence": {"type": "string"}, - } - } - } + }, + }, + }, }, } ), ) - def platform_beneficiaire_convo( - self, request, platform_id, beneficiary_id, email, ip=None - ): + def platform_beneficiaire_convo(self, request, platform_id, beneficiary_id, email, ip=None): return self.get( 'platform/%s/beneficiaire/%s/convo/' % (platform_id, beneficiary_id), email=email, @@ -1336,9 +1327,7 @@ class RSA13Resource(BaseResource, HTTPResource): } ), ) - def platform_beneficiaire_emploi( - self, request, platform_id, beneficiary_id, email, ip=None - ): + def platform_beneficiaire_emploi(self, request, platform_id, beneficiary_id, email, ip=None): return self.get( 'platform/%s/beneficiaire/%s/emploi/' % (platform_id, beneficiary_id), email=email, diff --git a/passerelle/contrib/sigerly/migrations/0001_initial.py b/passerelle/contrib/sigerly/migrations/0001_initial.py index 73fa94a4..1acfe1d6 100644 --- a/passerelle/contrib/sigerly/migrations/0001_initial.py +++ b/passerelle/contrib/sigerly/migrations/0001_initial.py @@ -17,18 +17,54 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Sigerly', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('basic_auth_username', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication username')), - ('basic_auth_password', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication password')), - ('client_certificate', models.FileField(blank=True, null=True, upload_to='', 
verbose_name='TLS client certificate')), - ('trusted_certificate_authorities', models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs')), + ( + 'basic_auth_username', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication username' + ), + ), + ( + 'basic_auth_password', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication password' + ), + ), + ( + 'client_certificate', + models.FileField( + blank=True, null=True, upload_to='', verbose_name='TLS client certificate' + ), + ), + ( + 'trusted_certificate_authorities', + models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'), + ), ('verify_cert', models.BooleanField(default=True, verbose_name='TLS verify certificates')), - ('http_proxy', models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy')), - ('base_url', models.CharField(help_text='example: https://sig.sigerly.fr/syecl_intervention/webservicev2/', max_length=256, verbose_name='Service URL')), - ('users', models.ManyToManyField(blank=True, related_name='_sigerly_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'http_proxy', + models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'), + ), + ( + 'base_url', + models.CharField( + help_text='example: https://sig.sigerly.fr/syecl_intervention/webservicev2/', + max_length=256, + verbose_name='Service URL', + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, related_name='_sigerly_users_+', related_query_name='+', to='base.ApiUser' + ), + ), ], options={ 'verbose_name': 'Sigerly', diff --git a/passerelle/contrib/solis_afi_mss/migrations/0001_initial.py b/passerelle/contrib/solis_afi_mss/migrations/0001_initial.py index b2ac2c35..05c655b3 100644 --- a/passerelle/contrib/solis_afi_mss/migrations/0001_initial.py +++ b/passerelle/contrib/solis_afi_mss/migrations/0001_initial.py @@ -17,18 +17,57 @@ class 
Migration(migrations.Migration): migrations.CreateModel( name='SolisAfiMss', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('basic_auth_username', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication username')), - ('basic_auth_password', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication password')), - ('client_certificate', models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS client certificate')), - ('trusted_certificate_authorities', models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs')), + ( + 'basic_auth_username', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication username' + ), + ), + ( + 'basic_auth_password', + models.CharField( + blank=True, max_length=128, verbose_name='Basic authentication password' + ), + ), + ( + 'client_certificate', + models.FileField( + blank=True, null=True, upload_to='', verbose_name='TLS client certificate' + ), + ), + ( + 'trusted_certificate_authorities', + models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'), + ), ('verify_cert', models.BooleanField(default=True, verbose_name='TLS verify certificates')), - ('http_proxy', models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy')), - ('base_url', models.CharField(help_text='example: https://solis.mon-application.fr/api-mss-afi/', max_length=256, verbose_name='Service URL')), - ('users', models.ManyToManyField(blank=True, related_name='_solisafimss_users_+', related_query_name='+', to='base.ApiUser')), + ( + 
'http_proxy', + models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'), + ), + ( + 'base_url', + models.CharField( + help_text='example: https://solis.mon-application.fr/api-mss-afi/', + max_length=256, + verbose_name='Service URL', + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_solisafimss_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Solis (mss-afi)', diff --git a/passerelle/contrib/solis_afi_mss/models.py b/passerelle/contrib/solis_afi_mss/models.py index fc701401..ba9f66b3 100644 --- a/passerelle/contrib/solis_afi_mss/models.py +++ b/passerelle/contrib/solis_afi_mss/models.py @@ -47,7 +47,7 @@ TAX_SCHEMA = { 'description': 'Tax amount', 'type': 'string', }, - } + }, } DEMAND_SCHEMA = { @@ -69,7 +69,7 @@ DEMAND_SCHEMA = { 'individusConcernes': { 'description': "List of related family member indexes separated by ':'", 'type': 'string', - 'pattern': r'^[0-9 :]+$' + 'pattern': r'^[0-9 :]+$', }, 'dateDebut': { 'description': 'Start date (YYYY-MM-DD)"', @@ -83,15 +83,16 @@ DEMAND_SCHEMA = { 'description': 'Invoice amount', 'type': 'string', }, - } + }, } class SolisAfiMss(BaseResource, HTTPResource): base_url = models.CharField( - max_length=256, blank=False, + max_length=256, + blank=False, verbose_name=_('Service URL'), - help_text=_('example: https://solis.mon-application.fr/api-mss-afi/') + help_text=_('example: https://solis.mon-application.fr/api-mss-afi/'), ) category = _('Business Process Connectors') @@ -112,10 +113,11 @@ class SolisAfiMss(BaseResource, HTTPResource): json_content = response.json() except ValueError: json_content = None - raise APIError('error status:%s %r, content:%r' % - (response.status_code, response.reason, response.content[:1024]), - data={'status_code': response.status_code, - 'json_content': json_content}) + raise APIError( + 'error status:%s %r, content:%r' + % (response.status_code, response.reason, 
response.content[:1024]), + data={'status_code': response.status_code, 'json_content': json_content}, + ) if response.status_code == 204 or not response.content: # 204 No Content return None @@ -134,9 +136,9 @@ class SolisAfiMss(BaseResource, HTTPResource): return json_response def check_status(self): - ''' + """ Raise an exception if something goes wrong. - ''' + """ return self.request('main/isAlive/') def search_from_email(self, email): @@ -155,57 +157,72 @@ class SolisAfiMss(BaseResource, HTTPResource): return index, adults, children @endpoint( - display_category=_('Agent'), display_order=1, - perm='can_access', methods=['get'], + display_category=_('Agent'), + display_order=1, + perm='can_access', + methods=['get'], description=_('Retrieve family composition'), parameters={ 'email': {'description': _("Agent's email address")}, - }) + }, + ) def family(self, request, email): adults, children = self.search_from_email(email)[1:] return {'data': adults + children} @endpoint( - display_category=_('Agent'), display_order=2, - perm='can_access', methods=['get'], + display_category=_('Agent'), + display_order=2, + perm='can_access', + methods=['get'], description=_('Retrieve agent'), parameters={ 'email': {'description': _("Agent's email address")}, - }) + }, + ) def agent(self, request, email): index, adults = self.search_from_email(email)[:2] return {'data': [x for x in adults if x['id'] == index]} @endpoint( - display_category=_('Agent'), display_order=3, - perm='can_access', methods=['get'], + display_category=_('Agent'), + display_order=3, + perm='can_access', + methods=['get'], description=_('Retrieve adults from family composition'), parameters={ 'email': {'description': _("Agent's email address")}, - }) + }, + ) def adults(self, request, email): adults = self.search_from_email(email)[1] return {'data': adults} @endpoint( - display_category=_('Agent'), display_order=4, - perm='can_access', methods=['get'], + display_category=_('Agent'), + display_order=4, + 
perm='can_access', + methods=['get'], description=_('Retrieve children from family composition'), parameters={ 'email': {'description': _("Agent's email address")}, - }) + }, + ) def children(self, request, email): children = self.search_from_email(email)[2] return {'data': children} @endpoint( - display_category=_('Budget'), display_order=1, - perm='can_access', methods=['get'], + display_category=_('Budget'), + display_order=1, + perm='can_access', + methods=['get'], description=_('Retrieve the list of charges for an agent'), parameters={ 'email': {'description': _("Agent's email address")}, 'year': {'description': _('Year of taxation (YYYY)')}, - }) + }, + ) def taxes(self, request, email, year=None): index = self.search_from_email(email)[0] params = {'indexAgent': str(index)} @@ -224,10 +241,14 @@ class SolisAfiMss(BaseResource, HTTPResource): return {'data': data} @endpoint( - display_category=_('Budget'), display_order=2, - name='declare-tax', perm='can_access', methods=['post'], + display_category=_('Budget'), + display_order=2, + name='declare-tax', + perm='can_access', + methods=['post'], description=_("Register an agent's tax for one year"), - post={'request_body': {'schema': {'application/json': TAX_SCHEMA}}}) + post={'request_body': {'schema': {'application/json': TAX_SCHEMA}}}, + ) def declare_tax(self, request, post_data): email = post_data.pop('email') post_data['indexAgent'] = str(self.search_from_email(email)[0]) @@ -235,14 +256,17 @@ class SolisAfiMss(BaseResource, HTTPResource): return {'data': response} @endpoint( - display_category=_('Budget'), display_order=3, - name='simulate-quotient', perm='can_access', methods=['get'], - description=_( - 'Simulate the calculation of a Quotient from the tax amount and the number of shares'), + display_category=_('Budget'), + display_order=3, + name='simulate-quotient', + perm='can_access', + methods=['get'], + description=_('Simulate the calculation of a Quotient from the tax amount and the number of 
shares'), parameters={ 'email': {'description': _("Agent's email address")}, 'year': {'description': _('Year of taxation (YYYY)')}, - }) + }, + ) def simulate_quotient(self, request, code, nb_parts, amount): params = { 'codeCalcul': code, @@ -253,13 +277,16 @@ class SolisAfiMss(BaseResource, HTTPResource): return {'data': response} @endpoint( - display_category=_('Allowance'), display_order=1, - perm='can_access', methods=['get'], + display_category=_('Allowance'), + display_order=1, + perm='can_access', + methods=['get'], description=_('Retrieve the list of allowance from an agent'), parameters={ 'email': {'description': _("Agent's email address")}, 'text_template': {'description': _('Text template')}, - }) + }, + ) def helps(self, request, email): params = {'indexAgent': str(self.search_from_email(email)[0])} response = self.request('afi/aide/getAidesParAgent/', params=params) @@ -270,18 +297,21 @@ class SolisAfiMss(BaseResource, HTTPResource): return {'data': response['aidesFinancieres']} @endpoint( - display_category=_('Allowance'), display_order=2, - name='demand-help', perm='can_access', methods=['post'], + display_category=_('Allowance'), + display_order=2, + name='demand-help', + perm='can_access', + methods=['post'], description=_('Submit allowance for an agent'), - post={'request_body': {'schema': {'application/json': DEMAND_SCHEMA}}}) + post={'request_body': {'schema': {'application/json': DEMAND_SCHEMA}}}, + ) def demand_help(self, request, post_data): email = post_data.pop('email') index_agent, adults, children = self.search_from_email(email) related_persons = [] for person in adults + children: - for index in [ - x.strip() for x in post_data['individusConcernes'].split(':') if x.strip()]: + for index in [x.strip() for x in post_data['individusConcernes'].split(':') if x.strip()]: if str(person['indexIndividu']) == index: related_persons.append({"indexIndividu": index}) diff --git a/passerelle/contrib/solis_apa/conciliation.py 
b/passerelle/contrib/solis_apa/conciliation.py index e233cd92..ace293e5 100644 --- a/passerelle/contrib/solis_apa/conciliation.py +++ b/passerelle/contrib/solis_apa/conciliation.py @@ -16,199 +16,199 @@ CONCILIATION_INDIVIDU = { - 'block': { - 'name': 'Individu', - 'pk': "PK/IndexIndividu/@V" + 'block': {'name': 'Individu', 'pk': "PK/IndexIndividu/@V"}, + 'criteria': { + 5: ( + { + 'EtatCivil/Nom/@V': 'equal', + 'EtatCivil/Prenom/@V': 'equal', + 'EtatCivil/DateNaissance/@V': 'equal', }, - 'criteria': { - 5: ({ - 'EtatCivil/Nom/@V': 'equal', - 'EtatCivil/Prenom/@V': 'equal', - 'EtatCivil/DateNaissance/@V': 'equal', - }, - { - 'EtatCivil/NomJeuneFille/@V': 'equal', - 'EtatCivil/Prenom/@V': 'equal', - 'EtatCivil/DateNaissance/@V': 'equal', - },), - 4: ({ - 'EtatCivil/Nom/@V': 'equal', - 'EtatCivil/DateNaissance/@V': 'equal', - }, - { - 'EtatCivil/NomJeuneFille/@V': 'equal', - 'EtatCivil/DateNaissance/@V': 'equal', - }, - { - 'EtatCivil/Nom/@V': 'equal', - 'EtatCivil/Prenom/@V': 'equal', - }, - { - 'EtatCivil/NomJeuneFille/@V': 'equal', - 'EtatCivil/Prenom/@V': 'equal', - }, - ), + { + 'EtatCivil/NomJeuneFille/@V': 'equal', + 'EtatCivil/Prenom/@V': 'equal', + 'EtatCivil/DateNaissance/@V': 'equal', }, - 'input': { - 'EtatCivil/Nom/@V': 'nom', - 'EtatCivil/Prenom/@V': 'prenom', - 'EtatCivil/DateNaissance/@V': 'dn', - 'EtatCivil/NomJeuneFille/@V': 'nom', + ), + 4: ( + { + 'EtatCivil/Nom/@V': 'equal', + 'EtatCivil/DateNaissance/@V': 'equal', }, - 'output': [ - "Dossier/PK/IndexDossier/@V", - "PK/IndexIndividu/@V", - "EtatCivil/Nom/@V", - "EtatCivil/NomJeuneFille/@V", - "EtatCivil/Prenom/@V", - "EtatCivil/DateNaissance/@V", - "Dossier/Adresse/NumeroLieu/@V", - "Dossier/Adresse/NatureLieu/@Lc", - "Dossier/Adresse/NomLieu/@V", - "Dossier/Adresse/ComplementLieu/@V", - "Dossier/Adresse/CpLieu/@V", - "Dossier/Adresse/Commune/NomCom/@V" - ] - } + { + 'EtatCivil/NomJeuneFille/@V': 'equal', + 'EtatCivil/DateNaissance/@V': 'equal', + }, + { + 'EtatCivil/Nom/@V': 'equal', + 
'EtatCivil/Prenom/@V': 'equal', + }, + { + 'EtatCivil/NomJeuneFille/@V': 'equal', + 'EtatCivil/Prenom/@V': 'equal', + }, + ), + }, + 'input': { + 'EtatCivil/Nom/@V': 'nom', + 'EtatCivil/Prenom/@V': 'prenom', + 'EtatCivil/DateNaissance/@V': 'dn', + 'EtatCivil/NomJeuneFille/@V': 'nom', + }, + 'output': [ + "Dossier/PK/IndexDossier/@V", + "PK/IndexIndividu/@V", + "EtatCivil/Nom/@V", + "EtatCivil/NomJeuneFille/@V", + "EtatCivil/Prenom/@V", + "EtatCivil/DateNaissance/@V", + "Dossier/Adresse/NumeroLieu/@V", + "Dossier/Adresse/NatureLieu/@Lc", + "Dossier/Adresse/NomLieu/@V", + "Dossier/Adresse/ComplementLieu/@V", + "Dossier/Adresse/CpLieu/@V", + "Dossier/Adresse/Commune/NomCom/@V", + ], +} CONCILIATION_INDIVIDU_SANS_DN = { - 'block': { - 'name': 'Individu', - 'pk': "PK/IndexIndividu/@V" + 'block': {'name': 'Individu', 'pk': "PK/IndexIndividu/@V"}, + 'criteria': { + 5: ( + { + 'EtatCivil/Nom/@V': 'equal', + 'EtatCivil/Prenom/@V': 'equal', }, - 'criteria': { - 5: ({ - 'EtatCivil/Nom/@V': 'equal', - 'EtatCivil/Prenom/@V': 'equal', - },{ - 'EtatCivil/NomJeuneFille/@V': 'equal', - 'EtatCivil/Prenom/@V': 'equal', - },), - 4: ({ - 'EtatCivil/Nom/@V': 'equal', - 'EtatCivil/Prenom/@V': 'approx', - },), - 3: ({ - 'EtatCivil/Nom/@V': 'approx', - 'EtatCivil/Prenom/@V': 'equal', - },), + { + 'EtatCivil/NomJeuneFille/@V': 'equal', + 'EtatCivil/Prenom/@V': 'equal', }, - 'input': { - 'EtatCivil/Nom/@V': 'nom', - 'EtatCivil/Prenom/@V': 'prenom', - 'EtatCivil/NomJeuneFille/@V': 'nom', + ), + 4: ( + { + 'EtatCivil/Nom/@V': 'equal', + 'EtatCivil/Prenom/@V': 'approx', }, - 'output': [ - "Dossier/PK/IndexDossier/@V", - "PK/IndexIndividu/@V", - "EtatCivil/Nom/@V", - "EtatCivil/NomJeuneFille/@V", - "EtatCivil/Prenom/@V", - "EtatCivil/DateNaissance/@V", - "Dossier/Adresse/NumeroLieu/@V", - "Dossier/Adresse/NatureLieu/@Lc", - "Dossier/Adresse/NomLieu/@V", - "Dossier/Adresse/ComplementLieu/@V", - "Dossier/Adresse/CpLieu/@V", - "Dossier/Adresse/Commune/NomCom/@V" - ] - } + ), + 3: ( + { + 
'EtatCivil/Nom/@V': 'approx', + 'EtatCivil/Prenom/@V': 'equal', + }, + ), + }, + 'input': { + 'EtatCivil/Nom/@V': 'nom', + 'EtatCivil/Prenom/@V': 'prenom', + 'EtatCivil/NomJeuneFille/@V': 'nom', + }, + 'output': [ + "Dossier/PK/IndexDossier/@V", + "PK/IndexIndividu/@V", + "EtatCivil/Nom/@V", + "EtatCivil/NomJeuneFille/@V", + "EtatCivil/Prenom/@V", + "EtatCivil/DateNaissance/@V", + "Dossier/Adresse/NumeroLieu/@V", + "Dossier/Adresse/NatureLieu/@Lc", + "Dossier/Adresse/NomLieu/@V", + "Dossier/Adresse/ComplementLieu/@V", + "Dossier/Adresse/CpLieu/@V", + "Dossier/Adresse/Commune/NomCom/@V", + ], +} CONCILIATION_ADRESSE = { - 'block': { - 'name': 'Adresse', - 'pk': "CodeLieu/@V" + 'block': {'name': 'Adresse', 'pk': "CodeLieu/@V"}, + 'criteria': { + 5: ( + { + 'NomLieu/@V': 'matches', + 'Commune/PK/CodeDepartement/@V': 'equal', + 'Commune/PK/CodeCommune/@V': 'equal', }, - 'criteria': { - 5: ({ - 'NomLieu/@V': 'matches', - 'Commune/PK/CodeDepartement/@V': 'equal', - 'Commune/PK/CodeCommune/@V': 'equal', - },), - }, - 'input': { - 'NomLieu/@V': 'lieu', - 'Commune/PK/CodeDepartement/@V': 'departement', - 'Commune/PK/CodeCommune/@V': 'commune', - }, - 'output': [ - "CodeLieu/@V", - "NatureLieu/@Lc", - "NomLieu/@V", - "CodePostal/@V", - "Commune/PK/CodeCommune/@V", - "Commune/NomCom/@V", - "CodeDepartement/@V", - ] - } + ), + }, + 'input': { + 'NomLieu/@V': 'lieu', + 'Commune/PK/CodeDepartement/@V': 'departement', + 'Commune/PK/CodeCommune/@V': 'commune', + }, + 'output': [ + "CodeLieu/@V", + "NatureLieu/@Lc", + "NomLieu/@V", + "CodePostal/@V", + "Commune/PK/CodeCommune/@V", + "Commune/NomCom/@V", + "CodeDepartement/@V", + ], +} CONCILIATION_PARTICULIER = { - 'block': { - 'name': 'Particulier', - 'pk': "PK/IndexParticulier/@V" + 'block': {'name': 'Particulier', 'pk': "PK/IndexParticulier/@V"}, + 'criteria': { + 5: ( + { + 'EtatCivil/Nom/@V': 'equal', + 'EtatCivil/Prenom/@V': 'equal', }, - 'criteria': { - 5: ({ - 'EtatCivil/Nom/@V': 'equal', - 'EtatCivil/Prenom/@V': 'equal', - 
},{ - 'EtatCivil/NomJeuneFille/@V': 'equal', - 'EtatCivil/Prenom/@V': 'equal', - },), - 4: ({ - 'EtatCivil/Nom/@V': 'equal', - 'EtatCivil/Prenom/@V': 'approx', - },), - 3: ({ - 'EtatCivil/Nom/@V': 'approx', - 'EtatCivil/Prenom/@V': 'equal', - },), + { + 'EtatCivil/NomJeuneFille/@V': 'equal', + 'EtatCivil/Prenom/@V': 'equal', }, - 'input': { - 'EtatCivil/Nom/@V': 'nom', - 'EtatCivil/Prenom/@V': 'prenom', - 'EtatCivil/NomJeuneFille/@V': 'nom', + ), + 4: ( + { + 'EtatCivil/Nom/@V': 'equal', + 'EtatCivil/Prenom/@V': 'approx', }, - 'output': [ - "PK/IndexParticulier/@V", - "EtatCivil/Nom/@V", - "EtatCivil/NomJeuneFille/@V", - "EtatCivil/Prenom/@V", - "EtatCivil/DateNaissance/@V", - "Adresse/NumeroLieu/@V", - "Adresse/NatureLieu/@Lc", - "Adresse/NomLieu/@V", - "Adresse/ComplementLieu/@V", - "Adresse/CpLieu/@V", - "Adresse/Commune/NomCom/@V" - ] - } + ), + 3: ( + { + 'EtatCivil/Nom/@V': 'approx', + 'EtatCivil/Prenom/@V': 'equal', + }, + ), + }, + 'input': { + 'EtatCivil/Nom/@V': 'nom', + 'EtatCivil/Prenom/@V': 'prenom', + 'EtatCivil/NomJeuneFille/@V': 'nom', + }, + 'output': [ + "PK/IndexParticulier/@V", + "EtatCivil/Nom/@V", + "EtatCivil/NomJeuneFille/@V", + "EtatCivil/Prenom/@V", + "EtatCivil/DateNaissance/@V", + "Adresse/NumeroLieu/@V", + "Adresse/NatureLieu/@Lc", + "Adresse/NomLieu/@V", + "Adresse/ComplementLieu/@V", + "Adresse/CpLieu/@V", + "Adresse/Commune/NomCom/@V", + ], +} def conciliation_payload(config, **data): - block = { - "name": config['block']['name'], - "PrimaryKey": { "key": [ config['block']['pk'] ] } - } + block = {"name": config['block']['name'], "PrimaryKey": {"key": [config['block']['pk']]}} setting = [] for affinity, afflist in config['criteria'].items(): for aff in afflist: criterium = [] for xpath, op in aff.items(): - criterium.append({ - 'key': xpath, - 'operator': op - }) - setting.append({ - "affinity": affinity, - "Criterium": criterium, - }) + criterium.append({'key': xpath, 'operator': op}) + setting.append( + { + "affinity": affinity, + 
"Criterium": criterium, + } + ) criterium = [] for xpath, local in config['input'].items(): - criterium.append({ - 'key': xpath, - 'value': data.get(local, u'') - }) + criterium.append({'key': xpath, 'value': data.get(local, u'')}) returndata = [] for xpath in config['output']: returndata.append(xpath) @@ -217,12 +217,17 @@ def conciliation_payload(config, **data): "ConciliationInputWS": { "Block": block, "Input": { - "Settings": { "Setting": setting, }, - "Criteria": { "Criterium": criterium }, + "Settings": { + "Setting": setting, }, - "Output": { "ReturnDatas": { "returnData": returndata }, }, - } + "Criteria": {"Criterium": criterium}, + }, + "Output": { + "ReturnDatas": {"returnData": returndata}, + }, } + } + def conciliation_output2dict(config, entity): d = {} diff --git a/passerelle/contrib/solis_apa/integration.py b/passerelle/contrib/solis_apa/integration.py index c62aa840..fb68b493 100644 --- a/passerelle/contrib/solis_apa/integration.py +++ b/passerelle/contrib/solis_apa/integration.py @@ -36,7 +36,7 @@ def build_message(data): protection = get_protection(fields, wf) if protection: - message['MesureProtection'] = protection + message['MesureProtection'] = protection demandeur = get_demandeur(fields, wf) if demandeur: @@ -63,10 +63,12 @@ def build_message(data): 'statutDemande': statut_demande, 'dateDepot': data['receipt_time'][0:10], 'dateArrivee': data['receipt_time'][0:10], - 'dateCompletude': date_completude + 'dateCompletude': date_completude, } - demande_apa_nomenclature = {'codeNature':1, 'codeType': 68} if etablissement else {'codeNature': 2, 'codeType': 70} + demande_apa_nomenclature = ( + {'codeNature': 1, 'codeType': 68} if etablissement else {'codeNature': 2, 'codeType': 70} + ) demande_apa['Nomenclature'] = demande_apa_nomenclature @@ -76,8 +78,13 @@ def build_message(data): return message + def get_protection(fields, wf): - protection_type = wf.get('supp2_var_typeprotectionorga') or fields.get('typeprotectionorga') or 
fields.get('typeprotectionpers') + protection_type = ( + wf.get('supp2_var_typeprotectionorga') + or fields.get('typeprotectionorga') + or fields.get('typeprotectionpers') + ) if protection_type: protection_type = protection_type.strip().upper() if 'TUTELLE' in protection_type: @@ -91,7 +98,11 @@ def get_protection(fields, wf): else: protection_type = None - protection_organisme = wf.get('tutelle_var_organisme_raw') or wf.get('supp2_var_organisme_raw') or fields.get('papier_organisme_raw') + protection_organisme = ( + wf.get('tutelle_var_organisme_raw') + or wf.get('supp2_var_organisme_raw') + or fields.get('papier_organisme_raw') + ) if protection_organisme: protection_organisme = protection_organisme.split('-')[1] else: @@ -105,6 +116,7 @@ def get_protection(fields, wf): return message + def get_demandeur(fields, wf): protection = get_protection(fields, wf) @@ -150,16 +162,14 @@ def get_demandeur(fields, wf): if protection_type is not None and protection_organisme is None: message['typeProtection'] = protection_type - message.update({ - 'nom': demandeur_nom, - 'prenom': demandeur_prenom, - 'sexe': demandeur_sexe, - 'Adresse': { - 'codeDepartement': 14, - 'codeCommune': 990, - 'codeLieu': '9999' + message.update( + { + 'nom': demandeur_nom, + 'prenom': demandeur_prenom, + 'sexe': demandeur_sexe, + 'Adresse': {'codeDepartement': 14, 'codeCommune': 990, 'codeLieu': '9999'}, } - }) + ) contact = {'email': demandeur_email} @@ -172,8 +182,11 @@ def get_demandeur(fields, wf): return None + def get_etablissement(fields, wf): - etablissement = wf.get('etablissement_var_nometablissement_raw') or fields.get('papier_nometablissement_raw') + etablissement = wf.get('etablissement_var_nometablissement_raw') or fields.get( + 'papier_nometablissement_raw' + ) if etablissement: etablissement = etablissement.split('-')[1] etablissement_date_entree = fields.get('date_etablissement') or '' @@ -184,6 +197,7 @@ def get_etablissement(fields, wf): return (etablissement, 
etablissement_date_entree) + def get_beneficiaire(fields, wf): beneficiaire_id = None if not wf.get('homonymie_var_homonyme_new_user_raw'): @@ -191,7 +205,9 @@ def get_beneficiaire(fields, wf): if not beneficiaire_id: beneficiaire_id = wf.get('homonymie_var_homonyme_id_raw') - beneficiaire_civilite = wf.get('supp3_var_civilitebeneficiaire') or fields.get('civilitebeneficiaire') or '' + beneficiaire_civilite = ( + wf.get('supp3_var_civilitebeneficiaire') or fields.get('civilitebeneficiaire') or '' + ) beneficiaire_civilite = beneficiaire_civilite.strip().upper() if 'MONSIEUR' in beneficiaire_civilite: beneficiaire_sexe = 'H' @@ -215,7 +231,9 @@ def get_beneficiaire(fields, wf): beneficiaire_ln = wf.get('supp3_var_lieunbeneficiaire') or fields.get('lieunbeneficiaire') or '' beneficiaire_ln = beneficiaire_ln.strip().upper() - beneficiaire_nationalite = wf.get('supp3_var_nationalitebeneficiaire') or fields.get('nationalitebeneficiaire') or 'NATIONAL' + beneficiaire_nationalite = ( + wf.get('supp3_var_nationalitebeneficiaire') or fields.get('nationalitebeneficiaire') or 'NATIONAL' + ) beneficiaire_nationalite = beneficiaire_nationalite.strip().upper() if 'NATIONAL' in beneficiaire_nationalite: beneficiaire_nationalite = 1 @@ -224,7 +242,9 @@ def get_beneficiaire(fields, wf): else: beneficiaire_nationalite = 3 - beneficiaire_situation = wf.get('supp3_var_situationfamillebeneficiaire') or fields.get('situationfamillebeneficiaire') or '' + beneficiaire_situation = ( + wf.get('supp3_var_situationfamillebeneficiaire') or fields.get('situationfamillebeneficiaire') or '' + ) beneficiaire_situation = beneficiaire_situation.strip().upper() if 'MARI' in beneficiaire_situation: beneficiaire_situation = 2 @@ -242,18 +262,36 @@ def get_beneficiaire(fields, wf): beneficiaire_tel = wf.get('supp3_var_telbeneficiaire') or fields.get('telbeneficiaire') or '' beneficiaire_email = wf.get('supp3_var_courrielbeneficiaire') or fields.get('courrielbeneficiaire') or '' - 
beneficiaire_code_commune_raw = wf.get('adresse_var_code_commune_raw') or fields.get('papier_excode_commune_raw') or fields.get('papier_code_commune_raw') or 'commune-0-0' + beneficiaire_code_commune_raw = ( + wf.get('adresse_var_code_commune_raw') + or fields.get('papier_excode_commune_raw') + or fields.get('papier_code_commune_raw') + or 'commune-0-0' + ) beneficiaire_code_commune = beneficiaire_code_commune_raw.split('-')[2] beneficiaire_code_departement = beneficiaire_code_commune_raw.split('-')[1] - beneficiaire_numero_lieu = wf.get('adresse_var_num_lieu') or fields.get('papier_exnum_lieu') or fields.get('papier_num_lieu') - beneficiaire_code_lieu = wf.get('adresse_var_code_lieu_raw') or fields.get('papier_excode_lieu_raw') or fields.get('papier_code_lieu_raw') + beneficiaire_numero_lieu = ( + wf.get('adresse_var_num_lieu') or fields.get('papier_exnum_lieu') or fields.get('papier_num_lieu') + ) + beneficiaire_code_lieu = ( + wf.get('adresse_var_code_lieu_raw') + or fields.get('papier_excode_lieu_raw') + or fields.get('papier_code_lieu_raw') + ) - beneficiaire_retraite = wf.get('supp3_var_retraitebeneficiaire') or fields.get('retraitebeneficiaire') or '' + beneficiaire_retraite = ( + wf.get('supp3_var_retraitebeneficiaire') or fields.get('retraitebeneficiaire') or '' + ) beneficiaire_retraite = beneficiaire_retraite.strip().upper() beneficiaire_retraite = 'OUI' in beneficiaire_retraite if beneficiaire_retraite: - info = wf.get('supp3_var_regretraitebeneficiaire_raw') or wf.get('retraite_var_listeretraite_raw') or fields.get('papier_listeretraite_raw') or 'organisme_retraite-0' + info = ( + wf.get('supp3_var_regretraitebeneficiaire_raw') + or wf.get('retraite_var_listeretraite_raw') + or fields.get('papier_listeretraite_raw') + or 'organisme_retraite-0' + ) beneficiaire_retraite_code = info.split('-')[1] message = {} @@ -266,18 +304,12 @@ def get_beneficiaire(fields, wf): if beneficiaire_nomnaissance: message['nomJeuneFille'] = beneficiaire_nomnaissance - 
message.update({ - 'sexe': beneficiaire_sexe, - 'dateNaissance': beneficiaire_dn - }) + message.update({'sexe': beneficiaire_sexe, 'dateNaissance': beneficiaire_dn}) if beneficiaire_ln: message['lieuNaissance'] = beneficiaire_ln - message.update({ - 'nationalite': beneficiaire_nationalite, - 'situationFamiliale': beneficiaire_situation - }) + message.update({'nationalite': beneficiaire_nationalite, 'situationFamiliale': beneficiaire_situation}) if beneficiaire_tel or beneficiaire_email: contact = {} @@ -293,22 +325,23 @@ def get_beneficiaire(fields, wf): etablissement, etablissement_date_entree = get_etablissement(fields, wf) if etablissement: - message.update({ - 'indexEtablissementAccueil': int(etablissement), - 'dateInstallBeneficiaire': etablissement_date_entree - }) + message.update( + { + 'indexEtablissementAccueil': int(etablissement), + 'dateInstallBeneficiaire': etablissement_date_entree, + } + ) if beneficiaire_retraite: - message.update({ - 'Retraite': { - 'codeOrganismeRetraite': int(beneficiaire_retraite_code), - 'periodicite': 4, - 'Nomenclature': { - 'indexFamille': 7, - 'indexNature': 2 + message.update( + { + 'Retraite': { + 'codeOrganismeRetraite': int(beneficiaire_retraite_code), + 'periodicite': 4, + 'Nomenclature': {'indexFamille': 7, 'indexNature': 2}, } } - }) + ) adresse = { 'codeCommune': int(beneficiaire_code_commune), @@ -324,6 +357,7 @@ def get_beneficiaire(fields, wf): return message + def get_conjoint(fields, wf, beneficiaire_situation): conjoint_id = None if not wf.get('homonymie_conjoint_var_homonyme_new_user_raw'): @@ -343,7 +377,9 @@ def get_conjoint(fields, wf, beneficiaire_situation): if conjoint_nom == '': conjoint_nom = None - conjoint_nomnaissance = wf.get('supp5_var_nomnaissanceconjoint') or fields.get('nomnaissanceconjoint') or '' + conjoint_nomnaissance = ( + wf.get('supp5_var_nomnaissanceconjoint') or fields.get('nomnaissanceconjoint') or '' + ) conjoint_nomnaissance = conjoint_nomnaissance.strip().upper() if 
conjoint_nomnaissance == '': conjoint_nomnaissance = None @@ -354,7 +390,9 @@ def get_conjoint(fields, wf, beneficiaire_situation): conjoint_dn = wf.get('supp5_var_dnnconjoint_raw') or fields.get('dnnconjoint') or '' conjoint_dn = conjoint_dn[0:10] - conjoint_participation = wf.get('supp5_var_participationconjoint') or fields.get('participationconjoint') or 'NON' + conjoint_participation = ( + wf.get('supp5_var_participationconjoint') or fields.get('participationconjoint') or 'NON' + ) conjoint_participation = 'OUI' in conjoint_participation.strip().upper() if not conjoint_nom: @@ -365,23 +403,28 @@ def get_conjoint(fields, wf, beneficiaire_situation): if conjoint_id: message['indexIndividu'] = conjoint_id - message.update({ - 'nom': conjoint_nom, - 'prenom': conjoint_prenom, - }) + message.update( + { + 'nom': conjoint_nom, + 'prenom': conjoint_prenom, + } + ) if conjoint_nomnaissance: message['nomJeuneFille'] = conjoint_nomnaissance - message.update({ - 'dateNaissance': conjoint_dn, - 'sexe': conjoint_sexe, - 'bParticipeRevenus': conjoint_participation, - 'situationFamiliale': beneficiaire_situation - }) + message.update( + { + 'dateNaissance': conjoint_dn, + 'sexe': conjoint_sexe, + 'bParticipeRevenus': conjoint_participation, + 'situationFamiliale': beneficiaire_situation, + } + ) return message + def get_revenu(fields, wf): salaire = wf.get('supp6_var_revenuSalaire') or fields.get('revenuSalaire') try: @@ -396,7 +439,9 @@ def get_revenu(fields, wf): revenus = salaire + retraite revenus_annee = wf.get('supp6_var_anneerefrevenu') or fields.get('anneerefrevenu') - conjoint_participation = wf.get('supp5_var_participationconjoint') or fields.get('participationconjoint') or 'NON' + conjoint_participation = ( + wf.get('supp5_var_participationconjoint') or fields.get('participationconjoint') or 'NON' + ) conjoint_participation = 'OUI' in conjoint_participation.strip().upper() if conjoint_participation: @@ -415,13 +460,11 @@ def get_revenu(fields, wf): if not 
revenus_annee: return None - message = { - 'anneeReference': int(revenus_annee), - 'revenuReference': revenus - } + message = {'anneeReference': int(revenus_annee), 'revenuReference': revenus} return message + def get_info_bancaire(fields, wf): banque_titulaire = wf.get('supp8_var_titulairecompte') or fields.get('titulairecompte') banque_domiciliation = wf.get('supp8_var_domicilebanque') or fields.get('domicilebanque') @@ -437,9 +480,10 @@ def get_info_bancaire(fields, wf): "codeGuichet": banque_codeguichet, "numeroCompte": banque_numero, "cleRib": banque_cle, - "modeReglement": 1 + "modeReglement": 1, } + def get_patrimoine(fields, wf): try: immobilier_bati = float(wf.get('supp7_var_bienbatis') or fields.get('bienbatis')) @@ -456,7 +500,9 @@ def get_patrimoine(fields, wf): except: prelevements = 0.0 try: - prelevements = prelevements + float(wf.get('supp6_var_revenuPrelevementConjoint') or fields.get('revenuPrelevementConjoint')) + prelevements = prelevements + float( + wf.get('supp6_var_revenuPrelevementConjoint') or fields.get('revenuPrelevementConjoint') + ) except: pass @@ -465,7 +511,9 @@ def get_patrimoine(fields, wf): except: fonciers = 0.0 try: - fonciers = fonciers + float(wf.get('supp6_var_revenuFoncierConjoint') or fields.get('revenuFoncierConjoint', 0.0)) + fonciers = fonciers + float( + wf.get('supp6_var_revenuFoncierConjoint') or fields.get('revenuFoncierConjoint', 0.0) + ) except: pass fonciers_annee = wf.get('supp7_var_anneefoncier') or fields.get('anneefoncier') @@ -475,43 +523,39 @@ def get_patrimoine(fields, wf): message = [] if immobilier_bati > 0 and revenus_annee: - message.append({ - "Nomenclature": { - "indexFamille": 2, - "indexNature": 1 - }, - "anneeReference": int(revenus_annee), - "valeurPrelevement": immobilier_bati - }) + message.append( + { + "Nomenclature": {"indexFamille": 2, "indexNature": 1}, + "anneeReference": int(revenus_annee), + "valeurPrelevement": immobilier_bati, + } + ) if immobilier_non_bati > 0 and revenus_annee: - 
message.append({ - "Nomenclature": { - "indexFamille": 2, - "indexNature": 2 - }, - "anneeReference": int(revenus_annee), - "valeurPrelevement": immobilier_non_bati - }) + message.append( + { + "Nomenclature": {"indexFamille": 2, "indexNature": 2}, + "anneeReference": int(revenus_annee), + "valeurPrelevement": immobilier_non_bati, + } + ) if prelevements > 0 and revenus_annee: - message.append({ - "Nomenclature": { - "indexFamille": 3, - "indexNature": 1 - }, - "anneeReference": int(revenus_annee), - "valeurPrelevement": prelevements - }) + message.append( + { + "Nomenclature": {"indexFamille": 3, "indexNature": 1}, + "anneeReference": int(revenus_annee), + "valeurPrelevement": prelevements, + } + ) if fonciers > 0 and fonciers_annee: - message.append({ - "Nomenclature": { - "indexFamille": 4, - "indexNature": 1 - }, - "anneeReference": int(fonciers_annee), - "valeurPrelevement": fonciers - }) + message.append( + { + "Nomenclature": {"indexFamille": 4, "indexNature": 1}, + "anneeReference": int(fonciers_annee), + "valeurPrelevement": fonciers, + } + ) return message diff --git a/passerelle/contrib/solis_apa/migrations/0001_initial.py b/passerelle/contrib/solis_apa/migrations/0001_initial.py index c8f03e49..f914a626 100644 --- a/passerelle/contrib/solis_apa/migrations/0001_initial.py +++ b/passerelle/contrib/solis_apa/migrations/0001_initial.py @@ -14,17 +14,55 @@ class Migration(migrations.Migration): migrations.CreateModel( name='SolisAPA', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', 
b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), + ( + 'log_level', + models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), ('base_url', models.CharField(max_length=128, verbose_name='url')), - ('verify_cert', models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity')), + ( + 'verify_cert', + models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity'), + ), ('username', models.CharField(max_length=128, verbose_name='Username', blank=True)), ('password', models.CharField(max_length=128, verbose_name='Password', blank=True)), - ('keystore', models.FileField(help_text='Certificate and private key in PEM format', upload_to=b'solis_apa', null=True, verbose_name='Keystore', blank=True)), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_solisapa_users_+', related_query_name='+', blank=True)), + ( + 'keystore', + models.FileField( + help_text='Certificate and private key in PEM format', + upload_to=b'solis_apa', + null=True, + verbose_name='Keystore', + blank=True, + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_solisapa_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Solis', diff --git a/passerelle/contrib/solis_apa/models.py b/passerelle/contrib/solis_apa/models.py index ff5862b2..b2deb1b4 100644 --- a/passerelle/contrib/solis_apa/models.py +++ b/passerelle/contrib/solis_apa/models.py @@ -30,26 +30,23 @@ from passerelle.compat import json_loads from passerelle.contrib.solis_apa import conciliation, suivi, integration from passerelle.utils.jsonresponse import APIError -HEADERS = { - 'Accept': 'application/json', - 'Content-Type': 
'application/json' -} +HEADERS = {'Accept': 'application/json', 'Content-Type': 'application/json'} APPLICATION = 'AsgTeleprocedureApa14' class SolisAPA(BaseResource): - base_url = models.CharField(max_length=128, blank=False, - verbose_name=_('url')) - verify_cert = models.BooleanField(default=True, - verbose_name=_('Check HTTPS Certificate validity')) - username = models.CharField(max_length=128, blank=True, - verbose_name=_('Username')) - password = models.CharField(max_length=128, blank=True, - verbose_name=_('Password')) - keystore = models.FileField(upload_to='solis_apa', null=True, blank=True, - verbose_name=_('Keystore'), - help_text=_('Certificate and private key in PEM format')) + base_url = models.CharField(max_length=128, blank=False, verbose_name=_('url')) + verify_cert = models.BooleanField(default=True, verbose_name=_('Check HTTPS Certificate validity')) + username = models.CharField(max_length=128, blank=True, verbose_name=_('Username')) + password = models.CharField(max_length=128, blank=True, verbose_name=_('Password')) + keystore = models.FileField( + upload_to='solis_apa', + null=True, + blank=True, + verbose_name=_('Keystore'), + help_text=_('Certificate and private key in PEM format'), + ) category = _('Business Process Connectors') @@ -67,15 +64,16 @@ class SolisAPA(BaseResource): try: ret = response.json() return ret - except(ValueError) as e: + except (ValueError) as e: raise APIError('Response content is not a valid JSON') def get_resource_url(self, uri): - return urlparse.urljoin(self.base_url,uri) + return urlparse.urljoin(self.base_url, uri) # Referentials methods - def _referential(self, referential, keys=True, order_by=False, - stop_on_error=False, attributes=[], **filters): + def _referential( + self, referential, keys=True, order_by=False, stop_on_error=False, attributes=[], **filters + ): uri = 'referential?referential=%s' % referential url = self.get_resource_url(uri) data = { @@ -83,25 +81,23 @@ class SolisAPA(BaseResource): 
'processKeys': keys, 'processOrderBy': order_by, 'stopOnError': stop_on_error, - }} + } + } if filters: solis_filters = [] - for k,v in filters.items(): - solis_filters.append({ - 'key':k, - 'value': v - }) + for k, v in filters.items(): + solis_filters.append({'key': k, 'value': v}) data['ReferentialOptions']['Filters'] = {'Filter': solis_filters} if attributes: data['ReferentialOptions']['Attributes'] = { - "referential": [ - { - "schema": "stdr", - "table": referential, - "field": attributes, - } - ] + "referential": [ + { + "schema": "stdr", + "table": referential, + "field": attributes, } + ] + } data = json.dumps(data) response = self.requests.post(url, data=data, headers=HEADERS) @@ -163,11 +159,9 @@ class SolisAPA(BaseResource): cache_key = 'solis-liste-communes-%s' % code_dep ref = cache.get(cache_key) if not ref or not use_cache: - ref = self._referential(referential='commune', - attributes=['cp_lieu'], - code_dep=code_dep) + ref = self._referential(referential='commune', attributes=['cp_lieu'], code_dep=code_dep) if use_cache: - cache.set(cache_key, ref, 60*60) + cache.set(cache_key, ref, 60 * 60) villes = ref.get('results') ret = [] @@ -197,7 +191,7 @@ class SolisAPA(BaseResource): def _cache(self, key, value=None): if value: - cache.set(key, value, 60*60) + cache.set(key, value, 60 * 60) return True cache_data = cache.get(key) if cache_data: @@ -207,9 +201,9 @@ class SolisAPA(BaseResource): # si commune est un code solis de la forme commune-dep-com if commune and commune.startswith('commune-'): x, departement, commune = commune.split('-') - call = self._conciliation(conciliation.CONCILIATION_ADRESSE, - commune=commune, departement=departement, - lieu='%%%s%%' % q) + call = self._conciliation( + conciliation.CONCILIATION_ADRESSE, commune=commune, departement=departement, lieu='%%%s%%' % q + ) lieux = call.get('results') ret = [] for l in lieux: @@ -221,23 +215,23 @@ class SolisAPA(BaseResource): # 'Commune/PK/CodeCommune/@V': u'118', # 'NatureLieu/@Lc': 
u'RUE', # 'NomLieu/@V': u'DU BEAU SITE' - for k,v in l.items(): + for k, v in l.items(): l[k] = v.strip() - ret.append({ - 'id': '%(CodeLieu/@V)s' % l, - 'text': '%(NatureLieu/@Lc)s %(NomLieu/@V)s' % l, - 'affinity': '%(@affinity)s' % l, - }) + ret.append( + { + 'id': '%(CodeLieu/@V)s' % l, + 'text': '%(NatureLieu/@Lc)s %(NomLieu/@V)s' % l, + 'affinity': '%(@affinity)s' % l, + } + ) return ret def get_homonymes(self, nom, prenom, dn): if dn: - dn = dn[6:]+'-'+dn[3:5]+'-'+dn[:2] - call = self._conciliation(conciliation.CONCILIATION_INDIVIDU, - nom=nom, prenom=prenom, dn=dn) + dn = dn[6:] + '-' + dn[3:5] + '-' + dn[:2] + call = self._conciliation(conciliation.CONCILIATION_INDIVIDU, nom=nom, prenom=prenom, dn=dn) else: - call = self._conciliation(conciliation.CONCILIATION_INDIVIDU_SANS_DN, - nom=nom, prenom=prenom) + call = self._conciliation(conciliation.CONCILIATION_INDIVIDU_SANS_DN, nom=nom, prenom=prenom) individus = call.get('results') ret = [] for i in individus: @@ -254,26 +248,31 @@ class SolisAPA(BaseResource): # 'EtatCivil/NomJeuneFille/@V': u'BUATHIER', # 'EtatCivil/Prenom/@V': u'JEANNE', # 'PK/IndexIndividu/@V': u'208359'}, - for k,v in i.items(): + for k, v in i.items(): i[k] = v.strip() njf = i['EtatCivil/NomJeuneFille/@V'] if njf: i['EtatCivil/NomJeuneFille/@V'] = u' (%s)' % njf if not i['EtatCivil/DateNaissance/@V']: i['EtatCivil/DateNaissance/@V'] = u'date de naissance inconnue' - ret.append({ - 'id': '%(PK/IndexIndividu/@V)s' % i, - 'text': ('%(EtatCivil/Nom/@V)s%(EtatCivil/NomJeuneFille/@V)s %(EtatCivil/Prenom/@V)s' + \ - ' - %(EtatCivil/DateNaissance/@V)s' + \ - ' - %(Dossier/Adresse/CpLieu/@V)s %(Dossier/Adresse/Commune/NomCom/@V)s') % i, - 'affinity': '%(@affinity)s' % i, - }) + ret.append( + { + 'id': '%(PK/IndexIndividu/@V)s' % i, + 'text': ( + '%(EtatCivil/Nom/@V)s%(EtatCivil/NomJeuneFille/@V)s %(EtatCivil/Prenom/@V)s' + + ' - %(EtatCivil/DateNaissance/@V)s' + + ' - %(Dossier/Adresse/CpLieu/@V)s %(Dossier/Adresse/Commune/NomCom/@V)s' + ) + % 
i, + 'affinity': '%(@affinity)s' % i, + } + ) ret.sort(key=lambda x: x['affinity']) ret.reverse() return ret def _process_common_ref(self, ref_name, q=None): - cache_key = 'solis-apa-%s' %ref_name.replace(' ','-') + cache_key = 'solis-apa-%s' % ref_name.replace(' ', '-') ref = self._cache(cache_key) if not ref: ref = self._referential(ref_name) @@ -288,18 +287,17 @@ class SolisAPA(BaseResource): return ret def get_referential(self, reference_name, q=None): - return self._process_common_ref(reference_name.replace('-',' '), q=q) + return self._process_common_ref(reference_name.replace('-', ' '), q=q) def get_suivi(self, suivi_type, datedebut, datefin): resource = { 'visite': 'ExportSuiviVisite', 'plan-aide': 'ExportSuiviPlanAide', 'presentation-commission': 'ExportSuiviPresentationCommission', - 'decision-commission': 'ExportSuiviDecisionCommission' + 'decision-commission': 'ExportSuiviDecisionCommission', } - uri = 'exportFlow?flow={}&application={}'.format(resource[suivi_type], - APPLICATION) + uri = 'exportFlow?flow={}&application={}'.format(resource[suivi_type], APPLICATION) url = self.get_resource_url(uri) diff --git a/passerelle/contrib/solis_apa/suivi.py b/passerelle/contrib/solis_apa/suivi.py index 3d1c678b..d1a9ba68 100644 --- a/passerelle/contrib/solis_apa/suivi.py +++ b/passerelle/contrib/solis_apa/suivi.py @@ -18,56 +18,45 @@ import json import datetime PAYLOAD = { - "visite": { - }, - "plan-aide": { - "DemandeAsg": { - "DateDebut": "%(datedebut)s", - "DateFin":"%(datefin)s" - } - }, - "presentation-commission": { - "OrdreJourAsg": { - "DateDebut": "%(datedebut)s", - "DateFin":"%(datefin)s" - } - }, + "visite": {}, + "plan-aide": {"DemandeAsg": {"DateDebut": "%(datedebut)s", "DateFin": "%(datefin)s"}}, + "presentation-commission": {"OrdreJourAsg": {"DateDebut": "%(datedebut)s", "DateFin": "%(datefin)s"}}, "decision-commission": { "DemandeAsg": { "EtatDecision": "R", "DateDebutNotification": "%(datedebut)s", - "DateFinNotification":"%(datefin)s", + 
"DateFinNotification": "%(datefin)s", "DateDebutDecision": "%(datedebut)s", - "DateFinDecision":"%(datefin)s" + "DateFinDecision": "%(datefin)s", } - } + }, } + def get_dates(datedebut, datefin): if datedebut: datedebut = datedebut[0][:10] else: - datedebut = (datetime.datetime.now() - - datetime.timedelta(180)).strftime('%Y-%m-%d') + datedebut = (datetime.datetime.now() - datetime.timedelta(180)).strftime('%Y-%m-%d') if datefin: datefin = datefin[0][:10] else: - datefin = (datetime.datetime.now() + - datetime.timedelta(180)).strftime('%Y-%m-%d') + datefin = (datetime.datetime.now() + datetime.timedelta(180)).strftime('%Y-%m-%d') return datedebut, datefin + def render_payload(suivi_type, datedebut, datefin): datedebut, datefin = get_dates(datedebut, datefin) - payload = json.dumps(PAYLOAD[suivi_type]) %{ - 'datedebut': datedebut, 'datefin': datefin} + payload = json.dumps(PAYLOAD[suivi_type]) % {'datedebut': datedebut, 'datefin': datefin} return json.loads(payload) def suivi_output(suivi_type, data): - suivi_type = suivi_type.replace('-','_') - return globals().get('suivi_%s_output' %suivi_type)(data) + suivi_type = suivi_type.replace('-', '_') + return globals().get('suivi_%s_output' % suivi_type)(data) + def suivi_visite_output(data): results = [] @@ -94,24 +83,26 @@ def suivi_visite_output(data): intervenant['id'] = _intervenant['PK']['IndexIntervenantSocial']['@V'] intervenant['nom'] = _intervenant['Nom']['@V'] intervenant['prenom'] = _intervenant['Prenom']['@V'] - coords = _intervenant.get('Coordonnees',{}) - intervenant['email'] = coords.get('Email',{}).get('@V',u'') - intervenant['telephone'] = coords.get('Telephone',{}).get('@V',u'') + coords = _intervenant.get('Coordonnees', {}) + intervenant['email'] = coords.get('Email', {}).get('@V', u'') + intervenant['telephone'] = coords.get('Telephone', {}).get('@V', u'') intervenants.append(intervenant) info['visite_intervenants'] = intervenants select = {} - select['integration_response_data_indexDemande'] = \ - 
visite['VisiteDemandeAsg']['PK']['Index']['@V'] - #select['integration_response_data_indexBeneficiaire'] = \ + select['integration_response_data_indexDemande'] = visite['VisiteDemandeAsg']['PK']['Index']['@V'] + # select['integration_response_data_indexBeneficiaire'] = \ # visite['VisiteDemandeAsg']['BeneficiaireAsgDemande']['IndividuBeneficiaire']['PK']['IndexIndividu']['@V'] - results.append({ - 'data': info, - 'select': select, - # 'debug': visite - }) + results.append( + { + 'data': info, + 'select': select, + # 'debug': visite + } + ) return results + def suivi_decision_commission_output(data): results = [] @@ -133,16 +124,17 @@ def suivi_decision_commission_output(data): info['decision_commentairenotification'] = suivi.get('CommentaireNotification', {}).get('@V', '') info['decision_datenotification'] = (suivi.get('DateNotification') or {}).get('@V', '') info['decision_datedecision'] = (suivi.get('DateDecision') or {}).get('@V', '') - #if not info['decision_datenotification'] or info['decision_v'] not in ('A', 'R'): + # if not info['decision_datenotification'] or info['decision_v'] not in ('A', 'R'): # continue select = {} - select['integration_response_data_indexDemande'] = \ - dem['PK']['Index']['@V'] - results.append({ - 'data': info, - 'select': select, - #'debug': dem - }) + select['integration_response_data_indexDemande'] = dem['PK']['Index']['@V'] + results.append( + { + 'data': info, + 'select': select, + #'debug': dem + } + ) return results @@ -151,6 +143,7 @@ def suivi_plan_aide_output(data): results = [] return results + def suivi_presentation_commission_output(data): results = [] @@ -166,15 +159,17 @@ def suivi_presentation_commission_output(data): info = {} info['presentation_dateseance'] = odj['DateSeance']['@V'] select = {} - select['integration_response_data_indexDemande'] = \ - odj['OrdreJourAsgDemandeAsg']['PK']['Index']['@V'] - #select['integration_response_data_indexBeneficiaire'] = \ + select['integration_response_data_indexDemande'] = 
odj['OrdreJourAsgDemandeAsg']['PK']['Index'][ + '@V' + ] + # select['integration_response_data_indexBeneficiaire'] = \ # odj['OrdreJourAsgDemandeAsg']['BeneficiaireAsgDemande']['IndividuBeneficiaire']['PK']['IndexIndividu']['@V'] - results.append({ - 'data': info, - 'select': select, - # 'debug': odj - }) + results.append( + { + 'data': info, + 'select': select, + # 'debug': odj + } + ) return results - diff --git a/passerelle/contrib/solis_apa/urls.py b/passerelle/contrib/solis_apa/urls.py index 277e22fd..670fbb21 100644 --- a/passerelle/contrib/solis_apa/urls.py +++ b/passerelle/contrib/solis_apa/urls.py @@ -19,18 +19,15 @@ from django.conf.urls import include, url from .views import * urlpatterns = [ - url(r'^(?P[\w,-]+)/$', SolisAPADetailView.as_view(), - name='solis-apa-view'), - url(r'^(?P[\w,-]+)/communes/$', CommunesView.as_view(), - name='solis-apa-communes'), - url(r'^(?P[\w,-]+)/lieux/$', LieuxView.as_view(), - name='solis-apa-lieux'), - url(r'^(?P[\w,-]+)/homonymes/$', HomonymesView.as_view(), - name='solis-apa-homonymes'), - url(r'^(?P[\w,-]+)/referential/(?P[\w,-]+)/$', - ReferentialView.as_view(), name='solis-apa-referential'), - url(r'^(?P[\w,-]+)/suivi/(?P[\w,-]+)/$', - SuiviView.as_view(), name='solis-apa-suivi'), - url(r'^(?P[\w,-]+)/integration/$', IntegrationView.as_view(), - name='solis-apa-integration') + url(r'^(?P[\w,-]+)/$', SolisAPADetailView.as_view(), name='solis-apa-view'), + url(r'^(?P[\w,-]+)/communes/$', CommunesView.as_view(), name='solis-apa-communes'), + url(r'^(?P[\w,-]+)/lieux/$', LieuxView.as_view(), name='solis-apa-lieux'), + url(r'^(?P[\w,-]+)/homonymes/$', HomonymesView.as_view(), name='solis-apa-homonymes'), + url( + r'^(?P[\w,-]+)/referential/(?P[\w,-]+)/$', + ReferentialView.as_view(), + name='solis-apa-referential', + ), + url(r'^(?P[\w,-]+)/suivi/(?P[\w,-]+)/$', SuiviView.as_view(), name='solis-apa-suivi'), + url(r'^(?P[\w,-]+)/integration/$', IntegrationView.as_view(), name='solis-apa-integration'), ] diff --git 
a/passerelle/contrib/solis_apa/views.py b/passerelle/contrib/solis_apa/views.py index 77efbebd..0882e19b 100644 --- a/passerelle/contrib/solis_apa/views.py +++ b/passerelle/contrib/solis_apa/views.py @@ -46,7 +46,7 @@ class DetailView(GenericDetailView): return {'data': self.get_data(request, *args, **kwargs)} def _get_params(self, request, *params): - return [ request.GET.get(item, None) for item in params] + return [request.GET.get(item, None) for item in params] class CommunesView(DetailView): @@ -91,8 +91,7 @@ class ReferentialView(DetailView): class SuiviView(DetailView): def get_data(self, request, *args, **kwargs): suivi_type = kwargs['suivi_type'] - if suivi_type not in ('visite','plan-aide', - 'presentation-commission','decision-commission'): + if suivi_type not in ('visite', 'plan-aide', 'presentation-commission', 'decision-commission'): raise CommissionTypeNotFound(_('Unknown suivi type')) params = self._get_params(request, 'datedebut', 'datefin') return self.get_object().get_suivi(suivi_type, *params) diff --git a/passerelle/contrib/strasbourg_eu/migrations/0001_initial.py b/passerelle/contrib/strasbourg_eu/migrations/0001_initial.py index a9dd2050..aa6afe0e 100644 --- a/passerelle/contrib/strasbourg_eu/migrations/0001_initial.py +++ b/passerelle/contrib/strasbourg_eu/migrations/0001_initial.py @@ -17,13 +17,39 @@ class Migration(migrations.Migration): migrations.CreateModel( name='StrasbourgEu', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('description', models.TextField(verbose_name='Description')), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), - ('log_level', models.CharField(choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', 
b'ERROR'), (b'CRITICAL', b'CRITICAL')], default=b'INFO', max_length=10, verbose_name='Log Level')), + ( + 'log_level', + models.CharField( + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + default=b'INFO', + max_length=10, + verbose_name='Log Level', + ), + ), ('liferay_api_url', models.URLField(max_length=256, verbose_name='Liferay API URL')), - ('users', models.ManyToManyField(blank=True, related_name='_strasbourgeu_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_strasbourgeu_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Strasbourg.eu', diff --git a/passerelle/contrib/strasbourg_eu/models.py b/passerelle/contrib/strasbourg_eu/models.py index 889283be..b987a3bf 100644 --- a/passerelle/contrib/strasbourg_eu/models.py +++ b/passerelle/contrib/strasbourg_eu/models.py @@ -40,10 +40,12 @@ class StrasbourgEu(BaseResource): response.raise_for_status() response.json()['interests'] - @endpoint(perm='can_access', - methods=['get', 'post'], - description_get=_('List interests'), - description_post=_('Update interests')) + @endpoint( + perm='can_access', + methods=['get', 'post'], + description_get=_('List interests'), + description_post=_('Update interests'), + ) def interests(self, request, name_id=None, **kwargs): if request.method == 'POST': if name_id is None: @@ -57,7 +59,9 @@ class StrasbourgEu(BaseResource): interests = [] # reset url = urlparse.urljoin(self.liferay_api_url, 'jsonws/interest.interest/set-user-interests') try: - response = self.requests.post(url, data={'userId': name_id, 'interestIds': ','.join(interests)}).json() + response = self.requests.post( + url, data={'userId': name_id, 'interestIds': ','.join(interests)} + ).json() except ValueError: return {'err': 2, 'err_desc': 'invalid service answer'} if 
'error' in response: @@ -82,13 +86,17 @@ class StrasbourgEu(BaseResource): interests.sort(key=lambda x: x['text']) return {'data': interests} - @endpoint(perm='can_access', - methods=['get', 'post'], - description_get=_('List notifications'), - description_post=_('Add notification')) + @endpoint( + perm='can_access', + methods=['get', 'post'], + description_get=_('List notifications'), + description_post=_('Add notification'), + ) def notifications(self, request, name_id, **kwargs): if request.method == 'GET': - url = urlparse.urljoin(self.liferay_api_url, 'jsonws/notification.notification/get-user-notifications') + url = urlparse.urljoin( + self.liferay_api_url, 'jsonws/notification.notification/get-user-notifications' + ) try: notifications = self.requests.post(url, data={'userId': name_id}).json() except ValueError: @@ -100,15 +108,20 @@ class StrasbourgEu(BaseResource): for date_format in ('%Y-%m-%d %H:%M:%S', '%a %b %d %H:%M:%S %Z %Y'): try: notification['parsedPublicationDate'] = force_text( - datetime.datetime.strptime(notification['publicationDate'], date_format)) + datetime.datetime.strptime(notification['publicationDate'], date_format) + ) break except ValueError: pass else: - self.logger.warning('received invalid publicationDate for notification %r: %r', - notification['id'], notification['publicationDate']) - notifications['notifications'] = [x for x in notifications['notifications'] - if x['parsedPublicationDate']] + self.logger.warning( + 'received invalid publicationDate for notification %r: %r', + notification['id'], + notification['publicationDate'], + ) + notifications['notifications'] = [ + x for x in notifications['notifications'] if x['parsedPublicationDate'] + ] notifications['notifications'].sort(key=lambda x: x['parsedPublicationDate'], reverse=True) return notifications else: @@ -126,10 +139,12 @@ class StrasbourgEu(BaseResource): else: return {'err': 1, 'err_desc': response.get('error')} - @endpoint(perm='can_access', - methods=['get', 
'post'], - description_get=_('List favorites'), - description_post=_('Add favorite')) + @endpoint( + perm='can_access', + methods=['get', 'post'], + description_get=_('List favorites'), + description_post=_('Add favorite'), + ) def favorites(self, request, name_id, url_filter=None, **kwargs): if request.method == 'GET': url = urlparse.urljoin(self.liferay_api_url, 'jsonws/favorite.favorite/get-user-favorites') @@ -168,14 +183,17 @@ class StrasbourgEu(BaseResource): else: return {'err': 1, 'err_desc': response.get('error')} - @endpoint(perm='can_access', name='favorites', - methods=['post'], - description=_('Delete favorite'), - pattern=r'(?P\w+)/delete$', - example_pattern='{favorite_id}/delete', - parameters={ - 'favorite_id': {'description': _('Favorite Identifier'), 'example_value': '16'}, - }) + @endpoint( + perm='can_access', + name='favorites', + methods=['post'], + description=_('Delete favorite'), + pattern=r'(?P\w+)/delete$', + example_pattern='{favorite_id}/delete', + parameters={ + 'favorite_id': {'description': _('Favorite Identifier'), 'example_value': '16'}, + }, + ) def favorite_delete(self, request, name_id, favorite_id, **kwargs): url = urlparse.urljoin(self.liferay_api_url, 'jsonws/favorite.favorite/delete-favorite') params = {'userId': name_id, 'favoriteId': favorite_id} diff --git a/passerelle/contrib/stub_invoices/migrations/0001_initial.py b/passerelle/contrib/stub_invoices/migrations/0001_initial.py index 0c5de42e..d46b15d6 100644 --- a/passerelle/contrib/stub_invoices/migrations/0001_initial.py +++ b/passerelle/contrib/stub_invoices/migrations/0001_initial.py @@ -14,11 +14,22 @@ class Migration(migrations.Migration): migrations.CreateModel( name='StubInvoicesConnector', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', 
max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_stubinvoicesconnector_users_+', related_query_name='+', blank=True)), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', + related_name='_stubinvoicesconnector_users_+', + related_query_name='+', + blank=True, + ), + ), ], options={ 'verbose_name': 'Invoices', diff --git a/passerelle/contrib/stub_invoices/migrations/0002_stubinvoicesconnector_log_level.py b/passerelle/contrib/stub_invoices/migrations/0002_stubinvoicesconnector_log_level.py index 716fa6ad..403c4342 100644 --- a/passerelle/contrib/stub_invoices/migrations/0002_stubinvoicesconnector_log_level.py +++ b/passerelle/contrib/stub_invoices/migrations/0002_stubinvoicesconnector_log_level.py @@ -14,6 +14,18 @@ class Migration(migrations.Migration): migrations.AddField( model_name='stubinvoicesconnector', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), ), ] diff --git a/passerelle/contrib/stub_invoices/models.py b/passerelle/contrib/stub_invoices/models.py index 7faa6288..4bfecea9 100644 --- a/passerelle/contrib/stub_invoices/models.py +++ b/passerelle/contrib/stub_invoices/models.py @@ -36,20 +36,22 @@ class StubInvoicesConnector(BaseResource): invoices = {} for i in range(15): now = timezone.now() - id_ = '%d%04d' % (now.year, i+1) + id_ = '%d%04d' % (now.year, i + 1) invoices[id_] = { 'id': id_, 'display_id': id_, 
'total_amount': Decimal(random.randint(100, 10000)) / 100, - 'has_pdf': bool(i%3), + 'has_pdf': bool(i % 3), 'created': (now - datetime.timedelta(days=20) + datetime.timedelta(days=i)).date(), 'label': 'Label %s' % id_, - 'pay_limit_date': (now + datetime.timedelta(days=2+random.randint(0, 10))).date(), - 'online_payment': bool(i%2), + 'pay_limit_date': (now + datetime.timedelta(days=2 + random.randint(0, 10))).date(), + 'online_payment': bool(i % 2), 'paid': False, } if i < 5: - invoices[id_]['payment_date'] = invoices[id_]['created'] + datetime.timedelta(days=1+random.randint(0, 3)) + invoices[id_]['payment_date'] = invoices[id_]['created'] + datetime.timedelta( + days=1 + random.randint(0, 3) + ) invoices[id_]['online_payment'] = False invoices[id_]['paid'] = True elif invoices[id_]['online_payment'] is False: @@ -63,51 +65,67 @@ class StubInvoicesConnector(BaseResource): def get_invoice(self, invoice_id): return self.invoices.get(invoice_id) - @endpoint(name='invoices', pattern='^history/$', - description=_('Get list of paid invoices'), - example_pattern='history/') + @endpoint( + name='invoices', + pattern='^history/$', + description=_('Get list of paid invoices'), + example_pattern='history/', + ) def invoices_history(self, request, NameID=None, **kwargs): return {'data': [x for x in self.get_invoices() if x.get('payment_date')]} - @endpoint(name='invoices', - description=_('Get list of unpaid invoices')) + @endpoint(name='invoices', description=_('Get list of unpaid invoices')) def invoices_list(self, request, NameID=None, **kwargs): return {'data': [x for x in self.get_invoices() if not x.get('payment_date')]} - @endpoint(name='invoice', pattern='^(?P\w+)/?$', - description=_('Get invoice details'), - example_pattern='{invoice_id}/', - parameters={ - 'invoice_id': { - 'description': _('Invoice identifier'), - 'example_value': list(invoices.keys())[0], - }}) + @endpoint( + name='invoice', + pattern='^(?P\w+)/?$', + description=_('Get invoice details'), + 
example_pattern='{invoice_id}/', + parameters={ + 'invoice_id': { + 'description': _('Invoice identifier'), + 'example_value': list(invoices.keys())[0], + } + }, + ) def invoice(self, request, invoice_id, NameID=None, **kwargs): return {'data': self.get_invoice(invoice_id)} - @endpoint(name='invoice', pattern='^(?P\w+)/pdf/?$', - description=_('Get invoice as a PDF file'), - long_description=_('not yet implemented'), - example_pattern='{invoice_id}/pdf/', - parameters={ - 'invoice_id': { - 'description': _('Invoice identifier'), - 'example_value': list(invoices.keys())[0], - }}) + @endpoint( + name='invoice', + pattern='^(?P\w+)/pdf/?$', + description=_('Get invoice as a PDF file'), + long_description=_('not yet implemented'), + example_pattern='{invoice_id}/pdf/', + parameters={ + 'invoice_id': { + 'description': _('Invoice identifier'), + 'example_value': list(invoices.keys())[0], + } + }, + ) def invoice_pdf(self, request, invoice_id, NameID=None, **kwargs): response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = 'attachment; filename="%s.pdf"' % invoice_id response.write('') return response - @endpoint(name='invoice', perm='can_access', methods=['post'], pattern='^(?P\w+)/pay/?$', - description=_('Pay invoice'), - long_description=_('not yet implemented'), - example_pattern='{invoice_id}/pay/', - parameters={ - 'invoice_id': { - 'description': _('Invoice identifier'), - 'example_value': list(invoices.keys())[0], - }}) + @endpoint( + name='invoice', + perm='can_access', + methods=['post'], + pattern='^(?P\w+)/pay/?$', + description=_('Pay invoice'), + long_description=_('not yet implemented'), + example_pattern='{invoice_id}/pay/', + parameters={ + 'invoice_id': { + 'description': _('Invoice identifier'), + 'example_value': list(invoices.keys())[0], + } + }, + ) def invoice_pay(self, request, invoice_id, NameID=None, **kwargs): return {'data': None} diff --git a/passerelle/contrib/tcl/migrations/0001_initial.py 
b/passerelle/contrib/tcl/migrations/0001_initial.py index 4ae18575..c3ef15d9 100644 --- a/passerelle/contrib/tcl/migrations/0001_initial.py +++ b/passerelle/contrib/tcl/migrations/0001_initial.py @@ -14,7 +14,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Line', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('transport_key', models.CharField(max_length=20)), ('indice', models.CharField(max_length=20, blank=True)), ('couleur', models.CharField(max_length=20)), @@ -33,7 +36,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Stop', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('id_data', models.CharField(max_length=10)), ('nom', models.CharField(max_length=50)), ('desserte', models.CharField(max_length=200)), @@ -46,12 +52,35 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Tcl', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_line_users_+', related_query_name='+', blank=True)), + ( 
+ 'log_level', + models.CharField( + default=b'INFO', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_line_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'TCL', diff --git a/passerelle/contrib/tcl/models.py b/passerelle/contrib/tcl/models.py index 408f2dad..bed7423c 100644 --- a/passerelle/contrib/tcl/models.py +++ b/passerelle/contrib/tcl/models.py @@ -25,7 +25,6 @@ from passerelle.base.models import BaseResource from passerelle.utils.api import endpoint - def get_tcl_data_url(key): if key == 'tclarret': return settings.TCL_GEOJSON_URL_TEMPLATE % key @@ -38,12 +37,13 @@ class Tcl(BaseResource): class Meta: verbose_name = _('TCL') - @endpoint(pattern='^(?P\w+)/?$', perm='can_access', - description=_('Info about a stop'), - example_pattern='{identifier}/', - parameters={ - 'identifier': {'description': _('Stop Identifier'), 'example_value': '30211'} - }) + @endpoint( + pattern='^(?P\w+)/?$', + perm='can_access', + description=_('Info about a stop'), + example_pattern='{identifier}/', + parameters={'identifier': {'description': _('Stop Identifier'), 'example_value': '30211'}}, + ) def stop(self, request, identifier): stop_object = Stop.objects.get(id_data=identifier) stop = { @@ -51,10 +51,11 @@ class Tcl(BaseResource): 'lat': str(stop_object.latitude), 'lng': str(stop_object.longitude), 'passings': [], - 'passings_by_line': [] + 'passings_by_line': [], } - response = self.requests.get(get_tcl_data_url('tclpassagearret'), - params={'field': 'id', 'value': identifier}) + response = self.requests.get( + get_tcl_data_url('tclpassagearret'), params={'field': 'id', 'value': identifier} + ) response.raise_for_status() passings_by_line = {} @@ -67,7 +68,7 @@ class Tcl(BaseResource): 
'326': 'F2', } for k, v in list(metro_lines.items()): # additional codes... - metro_lines[k+'A'] = v + metro_lines[k + 'A'] = v for passing in response.json()['values']: for line_code in (passing['ligne'], passing['ligne'][:-1], metro_lines.get(passing['ligne'])): try: @@ -86,9 +87,12 @@ class Tcl(BaseResource): stop['passings'].sort(key=lambda x: x['heurepassage']) stop['passings_by_line'] = sorted( - [{'ligne': v[0]['line_info']['ligne'], - 'line_info': v[0]['line_info'], 'passings': v} for k, v in passings_by_line.items()], - key=lambda x: x['passings'][0]['heurepassage']) + [ + {'ligne': v[0]['line_info']['ligne'], 'line_info': v[0]['line_info'], 'passings': v} + for k, v in passings_by_line.items() + ], + key=lambda x: x['passings'][0]['heurepassage'], + ) if not stop['passings']: # if there are no known passings, include all lines. @@ -106,7 +110,8 @@ class Tcl(BaseResource): fake_passing = {} fake_passing['line_info'] = line.get_info_dict() fake_passing['line_info']['direction'] = ( - line.libelle.split(' - ')[1] if ' - ' in line.libelle else '') + line.libelle.split(' - ')[1] if ' - ' in line.libelle else '' + ) stop['passings_by_line'].append(fake_passing) return {'data': stop} @@ -126,8 +131,10 @@ class Tcl(BaseResource): response = self.requests.get(url) response.raise_for_status() for line_data in response.json()['values']: - line, created = Line.objects.get_or_create(code_titan=line_data['code_titan'], - defaults={'transport_key': key, 'ligne': line_data['ligne']}) + line, created = Line.objects.get_or_create( + code_titan=line_data['code_titan'], + defaults={'transport_key': key, 'ligne': line_data['ligne']}, + ) line.__dict__.update(line_data) line.transport_key = key line.save() @@ -176,12 +183,14 @@ class Line(models.Model): def get_foreground_colour(self, background_colour): """Calculates the luminance of the given colour (six hexadecimal digits) - and returns an appropriate foreground colour.""" + and returns an appropriate foreground colour.""" 
# luminance coefficients taken from section C-9 from # http://www.faqs.org/faqs/graphics/colorspace-faq/ - brightess = int(background_colour[0:2], 16) * 0.212671 + \ - int(background_colour[2:4], 16) * 0.715160 + \ - int(background_colour[4:6], 16) * 0.072169 + brightess = ( + int(background_colour[0:2], 16) * 0.212671 + + int(background_colour[2:4], 16) * 0.715160 + + int(background_colour[4:6], 16) * 0.072169 + ) if brightess > 128: fg_colour = '000000' else: diff --git a/passerelle/contrib/teamnet_axel/migrations/0001_initial.py b/passerelle/contrib/teamnet_axel/migrations/0001_initial.py index cc918cf6..71522529 100644 --- a/passerelle/contrib/teamnet_axel/migrations/0001_initial.py +++ b/passerelle/contrib/teamnet_axel/migrations/0001_initial.py @@ -14,28 +14,55 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Link', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('nameid', models.CharField(max_length=256)), ('login', models.CharField(max_length=128)), ('pwd', models.CharField(max_length=128)), ], - options={ - }, + options={}, bases=(models.Model,), ), migrations.CreateModel( name='Management', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ( + 'id', + models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True), + ), ('title', models.CharField(verbose_name='Title', max_length=50)), ('slug', models.SlugField(verbose_name='Identifier', unique=True)), ('description', models.TextField(verbose_name='Description')), - ('wsdl_url', models.CharField(help_text='Teamnet Axel WSDL URL', max_length=128, verbose_name='WSDL URL')), - ('verify_cert', models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity')), + ( + 'wsdl_url', + models.CharField( + 
help_text='Teamnet Axel WSDL URL', max_length=128, verbose_name='WSDL URL' + ), + ), + ( + 'verify_cert', + models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity'), + ), ('username', models.CharField(max_length=128, verbose_name='Username', blank=True)), ('password', models.CharField(max_length=128, verbose_name='Password', blank=True)), - ('keystore', models.FileField(help_text='Certificate and private key in PEM format', upload_to=b'teamnet_axel', null=True, verbose_name='Keystore', blank=True)), - ('users', models.ManyToManyField(to='base.ApiUser', related_name='_link_users_+', related_query_name='+', blank=True)), + ( + 'keystore', + models.FileField( + help_text='Certificate and private key in PEM format', + upload_to=b'teamnet_axel', + null=True, + verbose_name='Keystore', + blank=True, + ), + ), + ( + 'users', + models.ManyToManyField( + to='base.ApiUser', related_name='_link_users_+', related_query_name='+', blank=True + ), + ), ], options={ 'verbose_name': 'Teamnet Axel', diff --git a/passerelle/contrib/teamnet_axel/migrations/0002_management_billing_regies.py b/passerelle/contrib/teamnet_axel/migrations/0002_management_billing_regies.py index 50f23c41..e63a1ae0 100644 --- a/passerelle/contrib/teamnet_axel/migrations/0002_management_billing_regies.py +++ b/passerelle/contrib/teamnet_axel/migrations/0002_management_billing_regies.py @@ -15,7 +15,9 @@ class Migration(migrations.Migration): migrations.AddField( model_name='management', name='billing_regies', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, verbose_name='Mapping between regie ids and billing ids'), + field=django.contrib.postgres.fields.jsonb.JSONField( + default=dict, verbose_name='Mapping between regie ids and billing ids' + ), preserve_default=True, ), ] diff --git a/passerelle/contrib/teamnet_axel/migrations/0003_management_log_level.py b/passerelle/contrib/teamnet_axel/migrations/0003_management_log_level.py index 0be7d98e..041180d3 100644 
--- a/passerelle/contrib/teamnet_axel/migrations/0003_management_log_level.py +++ b/passerelle/contrib/teamnet_axel/migrations/0003_management_log_level.py @@ -14,7 +14,19 @@ class Migration(migrations.Migration): migrations.AddField( model_name='management', name='log_level', - field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]), + field=models.CharField( + default=b'NOTSET', + max_length=10, + verbose_name='Log Level', + choices=[ + (b'NOTSET', b'NOTSET'), + (b'DEBUG', b'DEBUG'), + (b'INFO', b'INFO'), + (b'WARNING', b'WARNING'), + (b'ERROR', b'ERROR'), + (b'CRITICAL', b'CRITICAL'), + ], + ), preserve_default=True, ), ] diff --git a/passerelle/contrib/teamnet_axel/migrations/0008_auto_20200504_1402.py b/passerelle/contrib/teamnet_axel/migrations/0008_auto_20200504_1402.py index d2c3bce4..756f743a 100644 --- a/passerelle/contrib/teamnet_axel/migrations/0008_auto_20200504_1402.py +++ b/passerelle/contrib/teamnet_axel/migrations/0008_auto_20200504_1402.py @@ -16,11 +16,19 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='teamnetaxel', name='billing_regies', - field=django.contrib.postgres.fields.jsonb.JSONField(verbose_name='Mapping between regie ids and billing ids'), + field=django.contrib.postgres.fields.jsonb.JSONField( + verbose_name='Mapping between regie ids and billing ids' + ), ), migrations.AlterField( model_name='teamnetaxel', name='keystore', - field=models.FileField(blank=True, help_text='Certificate and private key in PEM format', null=True, upload_to='teamnet_axel', verbose_name='Keystore'), + field=models.FileField( + blank=True, + help_text='Certificate and private key in PEM format', + null=True, + upload_to='teamnet_axel', + verbose_name='Keystore', + ), ), ] diff --git a/passerelle/contrib/teamnet_axel/models.py 
b/passerelle/contrib/teamnet_axel/models.py index df872a17..13531f6b 100644 --- a/passerelle/contrib/teamnet_axel/models.py +++ b/passerelle/contrib/teamnet_axel/models.py @@ -53,22 +53,18 @@ def get_name_id(request): class TeamnetAxel(BaseResource): wsdl_url = models.CharField( - max_length=128, blank=False, - verbose_name=_('WSDL URL'), - help_text=_('Teamnet Axel WSDL URL')) - verify_cert = models.BooleanField( - default=True, - verbose_name=_('Check HTTPS Certificate validity')) - username = models.CharField( - max_length=128, blank=True, - verbose_name=_('Username')) - password = models.CharField( - max_length=128, blank=True, - verbose_name=_('Password')) + max_length=128, blank=False, verbose_name=_('WSDL URL'), help_text=_('Teamnet Axel WSDL URL') + ) + verify_cert = models.BooleanField(default=True, verbose_name=_('Check HTTPS Certificate validity')) + username = models.CharField(max_length=128, blank=True, verbose_name=_('Username')) + password = models.CharField(max_length=128, blank=True, verbose_name=_('Password')) keystore = models.FileField( - upload_to='teamnet_axel', null=True, blank=True, + upload_to='teamnet_axel', + null=True, + blank=True, verbose_name=_('Keystore'), - help_text=_('Certificate and private key in PEM format')) + help_text=_('Certificate and private key in PEM format'), + ) billing_regies = JSONField(_('Mapping between regie ids and billing ids')) @@ -103,17 +99,17 @@ class TeamnetAxel(BaseResource): # Axel authentication def authenticate(self, login, pwd): - '''return False or an AXEL user dict: - { - "login": "23060A", - "estidentifie": true, - "estbloque": false, - "estchangement_mdp_requis": false, - "nbechec": "0", - "idpersonne": "47747", - "idfamille": "23060", - } - ''' + """return False or an AXEL user dict: + { + "login": "23060A", + "estidentifie": true, + "estbloque": false, + "estchangement_mdp_requis": false, + "nbechec": "0", + "idpersonne": "47747", + "idfamille": "23060", + } + """ xml_utilisateur = 
ET.Element('UTILISATEUR') ET.SubElement(xml_utilisateur, 'LOGIN').text = login ET.SubElement(xml_utilisateur, 'PWD').text = pwd @@ -167,7 +163,7 @@ class TeamnetAxel(BaseResource): def ping(self, request, *args, **kwargs): try: client = soap.get_client(self) - except (Exception, ) as exc: + except (Exception,) as exc: raise APIError('Client Error: %s' % exc) res = {'ping': 'pong'} if 'debug' in request.GET: @@ -215,8 +211,7 @@ class TeamnetAxel(BaseResource): if 'idfamille' not in user: raise APIError('user without idfamille') famille = self.get_family_data(user['idfamille']) - Link.objects.update_or_create( - resource=self, nameid=nameid, defaults={'login': login, 'pwd': pwd}) + Link.objects.update_or_create(resource=self, nameid=nameid, defaults={'login': login, 'pwd': pwd}) user['_famille'] = famille user['_nameid'] = nameid return {'data': user} @@ -282,8 +277,7 @@ class TeamnetAxel(BaseResource): invoices = sorted(historical, key=lambda i: i['created'], reverse=True) return {'data': invoices} - @endpoint(name='regie', perm='can_access', - pattern='^(?P\w+)/invoice/(?P[\w,-]+)/$') + @endpoint(name='regie', perm='can_access', pattern='^(?P\w+)/invoice/(?P[\w,-]+)/$') def get_invoice_details(self, request, regie_id, invoice_id, **kwargs): family_id, i = invoice_id.split('-', 1) payable = self.get_teamnet_payable_invoices(regie_id, family_id) @@ -294,8 +288,9 @@ class TeamnetAxel(BaseResource): return {'data': historical[invoice_id]} return {'data': None} - @endpoint(name='regie', perm='can_access', - pattern='^(?P\w+)/invoice/(?P[\w,-]+)/pdf/$') + @endpoint( + name='regie', perm='can_access', pattern='^(?P\w+)/invoice/(?P[\w,-]+)/pdf/$' + ) def invoice_pdf(self, request, regie_id, invoice_id, **kwargs): family_id, invoice = invoice_id.split('-', 1) invoice_xml = ET.Element('FACTUREPDF') @@ -311,8 +306,12 @@ class TeamnetAxel(BaseResource): response.write(b64content) return response - @endpoint(name='regie', methods=['post'], - perm='can_access', 
pattern='^(?P\w+)/invoice/(?P[\w,-]+)/pay/$') + @endpoint( + name='regie', + methods=['post'], + perm='can_access', + pattern='^(?P\w+)/invoice/(?P[\w,-]+)/pay/$', + ) def pay_invoice(self, request, regie_id, invoice_id, **kwargs): data = json_loads(request.body) transaction_id = data.get('transaction_id') diff --git a/passerelle/contrib/teamnet_axel/soap.py b/passerelle/contrib/teamnet_axel/soap.py index 7db553c9..973e9880 100644 --- a/passerelle/contrib/teamnet_axel/soap.py +++ b/passerelle/contrib/teamnet_axel/soap.py @@ -24,7 +24,6 @@ from suds.client import Client class Transport(HttpAuthenticated): - def __init__(self, model, **kwargs): self.model = model HttpAuthenticated.__init__(self, **kwargs) # oldstyle class... @@ -40,16 +39,14 @@ class Transport(HttpAuthenticated): return kwargs def open(self, request): - resp = self.model.requests.get( - request.url, headers=request.headers, - **self.get_requests_kwargs()) + resp = self.model.requests.get(request.url, headers=request.headers, **self.get_requests_kwargs()) return BytesIO(resp.content) def send(self, request): self.addcredentials(request) resp = self.model.requests.post( - request.url, data=request.message, - headers=request.headers, **self.get_requests_kwargs()) + request.url, data=request.message, headers=request.headers, **self.get_requests_kwargs() + ) result = Reply(resp.status_code, resp.headers, resp.content) return result diff --git a/passerelle/contrib/toulouse_axel/migrations/0001_initial.py b/passerelle/contrib/toulouse_axel/migrations/0001_initial.py index 3a7f9c28..1efd439c 100644 --- a/passerelle/contrib/toulouse_axel/migrations/0001_initial.py +++ b/passerelle/contrib/toulouse_axel/migrations/0001_initial.py @@ -17,7 +17,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Link', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID'), + ), ('name_id', models.CharField(max_length=256)), ('dui', models.CharField(max_length=128)), ('person_id', models.CharField(max_length=128)), @@ -26,12 +29,28 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ToulouseAxel', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), - ('wsdl_url', models.CharField(help_text='Toulouse Axel WSDL URL', max_length=128, verbose_name='WSDL URL')), - ('users', models.ManyToManyField(blank=True, related_name='_toulouseaxel_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'wsdl_url', + models.CharField( + help_text='Toulouse Axel WSDL URL', max_length=128, verbose_name='WSDL URL' + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_toulouseaxel_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'Toulouse Axel', @@ -40,7 +59,9 @@ class Migration(migrations.Migration): migrations.AddField( model_name='link', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='toulouse_axel.ToulouseAxel'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='toulouse_axel.ToulouseAxel' + ), ), migrations.AlterUniqueTogether( name='link', diff --git a/passerelle/contrib/toulouse_axel/migrations/0003_auto_20191205_0948.py b/passerelle/contrib/toulouse_axel/migrations/0003_auto_20191205_0948.py index 11990ef8..8d64115e 100644 --- a/passerelle/contrib/toulouse_axel/migrations/0003_auto_20191205_0948.py +++ b/passerelle/contrib/toulouse_axel/migrations/0003_auto_20191205_0948.py @@ -15,11 
+15,19 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Lock', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('key', models.CharField(max_length=256)), ('lock_date', models.DateTimeField(auto_now_add=True)), ('locker', models.CharField(blank=True, max_length=256)), - ('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='toulouse_axel.ToulouseAxel')), + ( + 'resource', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='toulouse_axel.ToulouseAxel' + ), + ), ], ), migrations.AlterUniqueTogether( diff --git a/passerelle/contrib/toulouse_axel/models.py b/passerelle/contrib/toulouse_axel/models.py index 7071c633..2c513af2 100644 --- a/passerelle/contrib/toulouse_axel/models.py +++ b/passerelle/contrib/toulouse_axel/models.py @@ -52,10 +52,8 @@ WEEKDAYS = { class ToulouseAxel(BaseResource): wsdl_url = models.CharField( - max_length=128, - blank=False, - verbose_name=_('WSDL URL'), - help_text=_('Toulouse Axel WSDL URL')) + max_length=128, blank=False, verbose_name=_('WSDL URL'), help_text=_('Toulouse Axel WSDL URL') + ) category = _('Business Process Connectors') @@ -74,8 +72,9 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 'key': {'description': _('Key of the resource to lock')}, - 'locker': {'description': _('Identifier of the locker (can be empty)')} - }) + 'locker': {'description': _('Identifier of the locker (can be empty)')}, + }, + ) def lock(self, request, key, locker): if not key: raise APIError('key is empty', err_code='bad-request', http_status=400) @@ -88,7 +87,8 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 'key': {'description': _('Key of the resource to unlock')}, - }) + }, + ) def unlock(self, request, key): try: lock = Lock.objects.get(resource=self, key=key) @@ -103,7 
+103,8 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 'key': {'description': _('Key of the resource')}, - }) + }, + ) def locked(self, request, key): try: lock = Lock.objects.get(resource=self, key=key) @@ -123,8 +124,8 @@ class ToulouseAxel(BaseResource): raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) management_dates = {} for key, value in result.json_response['DATA']['PORTAIL']['DUIDATEGESTION'].items(): management_dates[key] = value @@ -133,10 +134,7 @@ class ToulouseAxel(BaseResource): cache.set(cache_key, management_dates, 3600) # 1 hour return management_dates - @endpoint( - display_order=4, - description=_("Get dates of the update management"), - perm='can_access') + @endpoint(display_order=4, description=_("Get dates of the update management"), perm='can_access') def management_dates(self, request): return {'data': self.get_management_dates()} @@ -147,8 +145,8 @@ class ToulouseAxel(BaseResource): raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) dui_data = result.json_response['DATA']['PORTAIL']['DUI'] code = dui_data['CODE'] @@ -171,7 +169,8 @@ class ToulouseAxel(BaseResource): 'application/json': schemas.LINK_SCHEMA, } } - }) + }, + ) def link(self, request, NameID, post_data): if not NameID: raise APIError('NameID is empty', err_code='bad-request', http_status=400) @@ -186,10 +185,8 @@ class ToulouseAxel(BaseResource): dui_data = result.json_response['DATA']['PORTAIL']['DUI'] link, created = self.link_set.get_or_create( - name_id=NameID, - defaults={ - 'dui': dui_data['IDDUI'], - 'person_id': dui_data['IDPERSONNE']}) + name_id=NameID, defaults={'dui': dui_data['IDDUI'], 'person_id': dui_data['IDPERSONNE']} + ) if not created 
and (link.dui != dui_data['IDDUI'] or link.person_id != dui_data['IDPERSONNE']): raise APIError('Data conflict', err_code='conflict') return { @@ -199,7 +196,7 @@ class ToulouseAxel(BaseResource): 'data': { 'xml_request': result.xml_request, 'xml_response': result.xml_response, - } + }, } def get_link(self, name_id): @@ -216,7 +213,8 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 'NameID': {'description': _('Publik ID')}, - }) + }, + ) def unlink(self, request, NameID): link = self.get_link(NameID) link_id = link.pk @@ -230,7 +228,8 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 'NameID': {'description': _('Publik ID')}, - }) + }, + ) def active_dui(self, request, NameID): # get link if exists try: @@ -274,9 +273,14 @@ class ToulouseAxel(BaseResource): pattern=r'^(?P[\w-]+)/?$', example_pattern='{code}', parameters={ - 'code': {'description': _('Referential code. Possible values: situation_familiale, csp, lien_parente, type_regime, regime'), - 'example_value': 'csp'}, - }) + 'code': { + 'description': _( + 'Referential code. 
Possible values: situation_familiale, csp, lien_parente, type_regime, regime' + ), + 'example_value': 'csp', + }, + }, + ) def referential(self, request, code): if code not in ['situation_familiale', 'csp', 'lien_parente', 'type_regime', 'regime']: raise APIError('Referential not found', err_code='not-found') @@ -292,8 +296,8 @@ class ToulouseAxel(BaseResource): raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) family_data = result.json_response['DATA']['PORTAIL']['DUI'] @@ -312,7 +316,9 @@ class ToulouseAxel(BaseResource): family_data['annee_reference_short'] = str(current_reference_year)[2:] family_data['annee_reference_label'] = '{}/{}'.format(current_reference_year, next_reference_year) - family_data['SITUATIONFAMILIALE_label'] = utils.get_label(utils.situation_familiale_mapping, family_data['SITUATIONFAMILIALE']) + family_data['SITUATIONFAMILIALE_label'] = utils.get_label( + utils.situation_familiale_mapping, family_data['SITUATIONFAMILIALE'] + ) for key in ['RL1', 'RL2']: if key not in family_data: continue @@ -325,10 +331,14 @@ class ToulouseAxel(BaseResource): for i, contact in enumerate(child.get('CONTACT', [])): contact['id'] = i contact['text'] = '{} {}'.format(contact['PRENOM'], contact['NOM']).strip() - contact['LIENPARENTE_label'] = utils.get_label(utils.lien_parente_mapping, contact['LIENPARENTE']) + contact['LIENPARENTE_label'] = utils.get_label( + utils.lien_parente_mapping, contact['LIENPARENTE'] + ) if 'REVENUS' in family_data: - family_data['REVENUS']['TYPEREGIME_label'] = utils.get_label(utils.type_regime_mapping, family_data['REVENUS']['TYPEREGIME']) + family_data['REVENUS']['TYPEREGIME_label'] = utils.get_label( + utils.type_regime_mapping, family_data['REVENUS']['TYPEREGIME'] + ) return family_data @@ -339,7 +349,8 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 
'NameID': {'description': _('Publik ID')}, - }) + }, + ) def family_info(self, request, NameID): link = self.get_link(NameID) family_data = self.get_family_data(link.dui, check_registrations=True, with_management_dates=True) @@ -352,7 +363,8 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 'NameID': {'description': _('Publik ID')}, - }) + }, + ) def children_info(self, request, NameID): link = self.get_link(NameID) family_data = self.get_family_data(link.dui, check_registrations=True) @@ -366,7 +378,8 @@ class ToulouseAxel(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, - }) + }, + ) def child_info(self, request, idpersonne, NameID): link = self.get_link(NameID) family_data = self.get_family_data(link.dui, check_registrations=True) @@ -385,7 +398,8 @@ class ToulouseAxel(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, - }) + }, + ) def child_contacts_info(self, request, idpersonne, NameID): link = self.get_link(NameID) family_data = self.get_family_data(link.dui, check_registrations=True) @@ -403,7 +417,8 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 'NameID': {'description': _('Publik ID')}, - }) + }, + ) def children_contacts_info(self, request, NameID): link = self.get_link(NameID) family_data = self.get_family_data(link.dui, check_registrations=True) @@ -565,17 +580,21 @@ class ToulouseAxel(BaseResource): new_allergie = [] for key in ['ASTHME', 'MEDICAMENTEUSES', 'ALIMENTAIRES']: if utils.encode_bool(child['SANITAIRE']['ALLERGIE'][key]) == 'OUI': - new_allergie.append({ - 'TYPE': key, - 'ALLERGIQUE': 'OUI', - 'NOMALLERGIE': None, - }) + new_allergie.append( + { + 'TYPE': key, + 'ALLERGIQUE': 'OUI', + 'NOMALLERGIE': None, + } + ) if child['SANITAIRE']['ALLERGIE']['AUTRES']: - new_allergie.append({ - 'TYPE': 'AUTRES', - 'ALLERGIQUE': 'OUI', - 'NOMALLERGIE': 
child['SANITAIRE']['ALLERGIE']['AUTRES'], - }) + new_allergie.append( + { + 'TYPE': 'AUTRES', + 'ALLERGIQUE': 'OUI', + 'NOMALLERGIE': child['SANITAIRE']['ALLERGIE']['AUTRES'], + } + ) child['SANITAIRE']['ALLERGIE'] = new_allergie if not child['SANITAIRE'].get('ALLERGIE'): # remove ALLERGIE block if empty @@ -616,7 +635,8 @@ class ToulouseAxel(BaseResource): 'application/json': schemas.UPDATE_FAMILY_SCHEMA, } } - }) + }, + ) def update_family_info(self, request, NameID, post_data): link = self.get_link(NameID) @@ -637,9 +657,12 @@ class ToulouseAxel(BaseResource): raise APIError( 'Axel error: %s' % e, err_code='error', - data={'error_post_data': post_data, - 'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={ + 'error_post_data': post_data, + 'xml_request': e.xml_request, + 'xml_response': e.xml_response, + }, + ) return { 'updated': True, @@ -647,7 +670,7 @@ class ToulouseAxel(BaseResource): 'data': { 'xml_request': result.xml_request, 'xml_response': result.xml_response, - } + }, } def get_invoices(self, regie_id, dui=None, name_id=None): @@ -661,8 +684,8 @@ class ToulouseAxel(BaseResource): raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) data = result.json_response['DATA']['PORTAIL']['DUI'] result = [] @@ -676,14 +699,14 @@ class ToulouseAxel(BaseResource): link = self.get_link(name_id) try: result = schemas.list_dui_factures( - self, - {'LISTFACTURE': {'NUMDUI': link.dui, 'DEBUT': '1970-01-01'}}) + self, {'LISTFACTURE': {'NUMDUI': link.dui, 'DEBUT': '1970-01-01'}} + ) except AxelError as e: raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) data = result.json_response['DATA']['PORTAIL']['LISTFACTURE'] result = [] @@ -698,7 +721,9 @@ 
class ToulouseAxel(BaseResource): 'NUMDIRECTION': direction['NUMDIRECTION'], 'IDDIRECTION': direction['IDDIRECTION'], 'LIBDIRECTION': direction['LIBDIRECTION'], - })) + }, + ) + ) return result def get_invoice(self, regie_id, invoice_id, dui=None, name_id=None, historical=None): @@ -720,8 +745,9 @@ class ToulouseAxel(BaseResource): description=_("Get invoices to pay"), parameters={ 'NameID': {'description': _('Publik ID')}, - 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'} - }) + 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'}, + }, + ) def invoices(self, request, regie_id, NameID): invoices_data = self.get_invoices(regie_id=regie_id, name_id=NameID) return {'data': invoices_data} @@ -736,8 +762,9 @@ class ToulouseAxel(BaseResource): description=_("Get invoices already paid"), parameters={ 'NameID': {'description': _('Publik ID')}, - 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'} - }) + 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'}, + }, + ) def invoices_history(self, request, regie_id, NameID): invoices_data = self.get_historical_invoices(name_id=NameID) return {'data': invoices_data} @@ -753,12 +780,15 @@ class ToulouseAxel(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'}, - 'invoice_id': {'description': _('Invoice identifier'), 'example_value': 'DUI-42'} - }) + 'invoice_id': {'description': _('Invoice identifier'), 'example_value': 'DUI-42'}, + }, + ) def invoice(self, request, regie_id, invoice_id, NameID): real_invoice_id = invoice_id.split('-')[-1] historical = invoice_id.startswith('historical-') - invoice = self.get_invoice(regie_id=regie_id, name_id=NameID, invoice_id=real_invoice_id, historical=historical) + invoice = self.get_invoice( + regie_id=regie_id, name_id=NameID, invoice_id=real_invoice_id, 
historical=historical + ) if invoice is None: raise APIError('Invoice not found', err_code='not-found') @@ -775,14 +805,17 @@ class ToulouseAxel(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'}, - 'invoice_id': {'description': _('Invoice identifier'), 'example_value': 'DUI-42'} - }) + 'invoice_id': {'description': _('Invoice identifier'), 'example_value': 'DUI-42'}, + }, + ) def invoice_pdf(self, request, regie_id, invoice_id, NameID): # check that invoice is related to current user real_invoice_id = invoice_id.split('-')[-1] historical = invoice_id.startswith('historical-') try: - invoice = self.get_invoice(regie_id=regie_id, name_id=NameID, invoice_id=real_invoice_id, historical=historical) + invoice = self.get_invoice( + regie_id=regie_id, name_id=NameID, invoice_id=real_invoice_id, historical=historical + ) except APIError as e: e.http_status = 404 raise @@ -793,14 +826,16 @@ class ToulouseAxel(BaseResource): raise APIError('PDF not available', err_code='not-available', http_status=404) try: - result = schemas.ref_facture_pdf(self, {'PORTAIL': {'FACTUREPDF': {'IDFACTURE': int(invoice['display_id'])}}}) + result = schemas.ref_facture_pdf( + self, {'PORTAIL': {'FACTUREPDF': {'IDFACTURE': int(invoice['display_id'])}}} + ) except AxelError as e: raise APIError( 'Axel error: %s' % e, err_code='error', http_status=404, - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) b64content = base64.b64decode(result.json_response['DATA']['PORTAIL']['PDF']['@FILE']) if not b64content: @@ -821,7 +856,7 @@ class ToulouseAxel(BaseResource): description=_('Notify an invoice as paid'), parameters={ 'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'}, - 'invoice_id': {'description': _('Invoice identifier'), 'example_value': 'DUI-42'} + 'invoice_id': 
{'description': _('Invoice identifier'), 'example_value': 'DUI-42'}, }, post={ 'request_body': { @@ -829,7 +864,8 @@ class ToulouseAxel(BaseResource): 'application/json': schemas.PAYMENT_SCHEMA, } } - }) + }, + ) def pay_invoice(self, request, regie_id, invoice_id, **kwargs): data = json_loads(request.body) dui, invoice_id = invoice_id.split('-') @@ -857,8 +893,8 @@ class ToulouseAxel(BaseResource): raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) return {'data': True} def get_children_activities(self, dui, reference_year): @@ -867,19 +903,22 @@ class ToulouseAxel(BaseResource): if result is not None: return result try: - result = schemas.enfants_activites(self, { - 'DUI': { - 'IDDUI': dui, - 'ANNEEREFERENCE': str(reference_year), - 'TYPESACTIVITES': 'MAT,MIDI,SOIR,GARD', - } - }) + result = schemas.enfants_activites( + self, + { + 'DUI': { + 'IDDUI': dui, + 'ANNEEREFERENCE': str(reference_year), + 'TYPESACTIVITES': 'MAT,MIDI,SOIR,GARD', + } + }, + ) except AxelError as e: raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) children_activities = result.json_response['DATA']['PORTAIL']['DUI'].get('ENFANT', []) for child in children_activities: @@ -894,7 +933,12 @@ class ToulouseAxel(BaseResource): # exclude also child with more than one registration per activity_type or missing activity activity_types = [a['TYPEACTIVITE'] for a in child.get('ACTIVITE', [])] activity_types.sort() - if activity_types != ['MAT', 'MIDI', 'SOIR'] and activity_types != ['GARD', 'MAT', 'MIDI', 'SOIR']: + if activity_types != ['MAT', 'MIDI', 'SOIR'] and activity_types != [ + 'GARD', + 'MAT', + 'MIDI', + 'SOIR', + ]: # GARD is optional continue # ok, store child @@ -924,7 +968,12 @@ class 
ToulouseAxel(BaseResource): def get_booking_data(self, dui, child_id, booking_date): start_date, end_date = utils.get_week_dates_from_date(booking_date) - cache_key = 'toulouse-axel-%s-booking-data-%s-%s-%s' % (self.pk, dui, child_id, start_date.isoformat()) + cache_key = 'toulouse-axel-%s-booking-data-%s-%s-%s' % ( + self.pk, + dui, + child_id, + start_date.isoformat(), + ) result = cache.get(cache_key) if result is not None: return result @@ -933,36 +982,42 @@ class ToulouseAxel(BaseResource): # first get activities information for the child child_activities = self.get_child_activities( - dui=dui, - reference_year=reference_year, - child_id=child_id) + dui=dui, reference_year=reference_year, child_id=child_id + ) # then get booking of the requested week for the child activity_ids = [act['IDACTIVITE'] for act in child_activities.get('ACTIVITE', [])] activity_data = [] for activity_id in activity_ids: - activity_data.append({ - 'IDACTIVITE': activity_id, - 'ANNEEREFERENCE': str(reference_year), - 'DATEDEBUT': start_date.strftime(utils.json_date_format), - 'DATEDFIN': end_date.strftime(utils.json_date_format), - }) - try: - data = schemas.reservation_periode(self, {'PORTAIL': { - 'DUI': { - 'IDDUI': dui, - 'ENFANT': { - 'IDPERSONNE': child_id, - 'ACTIVITE': activity_data, - } + activity_data.append( + { + 'IDACTIVITE': activity_id, + 'ANNEEREFERENCE': str(reference_year), + 'DATEDEBUT': start_date.strftime(utils.json_date_format), + 'DATEDFIN': end_date.strftime(utils.json_date_format), } - }}) + ) + try: + data = schemas.reservation_periode( + self, + { + 'PORTAIL': { + 'DUI': { + 'IDDUI': dui, + 'ENFANT': { + 'IDPERSONNE': child_id, + 'ACTIVITE': activity_data, + }, + } + } + }, + ) except AxelError as e: raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) child_booking = None for child in 
data.json_response['DATA']['PORTAIL']['DUI'].get('ENFANT', []): @@ -984,7 +1039,7 @@ class ToulouseAxel(BaseResource): 'wednesday': utils.get_booking(booking['JOUR'][2]), 'thursday': utils.get_booking(booking['JOUR'][3]), 'friday': utils.get_booking(booking['JOUR'][4]), - } + }, } for activity in child_activities.get('ACTIVITE', []): @@ -994,7 +1049,8 @@ class ToulouseAxel(BaseResource): activity['text'] = u'{} (inscription du {} au {})'.format( activity['LIBELLEACTIVITE'], start_date.strftime(utils.xml_date_format), - end_date.strftime(utils.xml_date_format)) + end_date.strftime(utils.xml_date_format), + ) activity['annee_reference'] = reference_year activity['annee_reference_short'] = str(reference_year)[2:] activity['annee_reference_label'] = '{}/{}'.format(reference_year, reference_year + 1) @@ -1010,8 +1066,11 @@ class ToulouseAxel(BaseResource): perm='can_access', parameters={ 'NameID': {'description': _('Publik ID')}, - 'pivot_date': {'description': _('Pivot date (format MM-DD). After this date, next year is available.')}, - }) + 'pivot_date': { + 'description': _('Pivot date (format MM-DD). 
After this date, next year is available.') + }, + }, + ) def clae_years(self, request, NameID, pivot_date): link = self.get_link(NameID) @@ -1020,7 +1079,9 @@ class ToulouseAxel(BaseResource): # get pivot date try: - pivot_date = datetime.datetime.strptime('%s-%s' % (reference_year, pivot_date), utils.json_date_format).date() + pivot_date = datetime.datetime.strptime( + '%s-%s' % (reference_year, pivot_date), utils.json_date_format + ).date() except ValueError: raise APIError('bad date format, should be MM-DD', err_code='bad-request', http_status=400) # adjust pivot year @@ -1028,12 +1089,14 @@ class ToulouseAxel(BaseResource): # between january and july, reference year is the year just before pivot_date = pivot_date.replace(year=reference_year + 1) - data = [{ - 'id': str(reference_year), - 'text': '%s/%s' % (reference_year, reference_year + 1), - 'type': 'encours', - 'refdate': today.strftime(utils.json_date_format) - }] + data = [ + { + 'id': str(reference_year), + 'text': '%s/%s' % (reference_year, reference_year + 1), + 'type': 'encours', + 'refdate': today.strftime(utils.json_date_format), + } + ] if today < pivot_date: # date pivot not in the past, return only current year return {'data': data} @@ -1047,12 +1110,14 @@ class ToulouseAxel(BaseResource): # 02/29 ? 
next_ref_date = today + datetime.timedelta(days=366) # return also next year - data.append({ - 'id': str(reference_year + 1), - 'text': '%s/%s' % (reference_year + 1, reference_year + 2), - 'type': 'suivante', - 'refdate': next_ref_date.strftime(utils.json_date_format) - }) + data.append( + { + 'id': str(reference_year + 1), + 'text': '%s/%s' % (reference_year + 1, reference_year + 2), + 'type': 'suivante', + 'refdate': next_ref_date.strftime(utils.json_date_format), + } + ) return {'data': data} @endpoint( @@ -1063,7 +1128,8 @@ class ToulouseAxel(BaseResource): parameters={ 'NameID': {'description': _('Publik ID')}, 'booking_date': {'description': _('Booking date (to get reference year)')}, - }) + }, + ) def clae_children_activities_info(self, request, NameID, booking_date): link = self.get_link(NameID) try: @@ -1088,7 +1154,8 @@ class ToulouseAxel(BaseResource): 'idpersonne': {'description': _('Child ID')}, 'start_date': {'description': _('Start date of the period')}, 'end_date': {'description': _('End date of the period')}, - }) + }, + ) def clae_booking_activities_info(self, request, NameID, idpersonne, start_date, end_date): link = self.get_link(NameID) try: @@ -1101,10 +1168,15 @@ class ToulouseAxel(BaseResource): in_8_days = today + datetime.timedelta(days=8) def get_activities_for_week(week_start_date, week_end_date): - booking_data = self.get_booking_data(dui=link.dui, child_id=idpersonne, booking_date=week_start_date).get('ACTIVITE', []) + booking_data = self.get_booking_data( + dui=link.dui, child_id=idpersonne, booking_date=week_start_date + ).get('ACTIVITE', []) booking_data = {d['TYPEACTIVITE']: d for d in booking_data} start_date, end_date = utils.get_week_dates_from_date(week_start_date) - week = 'week:%s:%s' % (start_date.strftime(utils.json_date_format), end_date.strftime(utils.json_date_format)) + week = 'week:%s:%s' % ( + start_date.strftime(utils.json_date_format), + end_date.strftime(utils.json_date_format), + ) day_date = week_start_date 
while day_date <= week_end_date: day = WEEKDAYS[day_date.weekday()] @@ -1139,16 +1211,15 @@ class ToulouseAxel(BaseResource): def get_min_and_max_possible_days(self, dui, reference_year, child_id): child_activities = self.get_child_activities( - dui=dui, - reference_year=reference_year, - child_id=child_id) + dui=dui, reference_year=reference_year, child_id=child_id + ) if not child_activities.get('ACTIVITE', []): return None, None entree_dates = [act['DATEENTREE'] for act in child_activities.get('ACTIVITE', [])] sortie_dates = [act['DATESORTIE'] for act in child_activities.get('ACTIVITE', [])] return ( datetime.datetime.strptime(max(entree_dates), utils.json_date_format).date(), - datetime.datetime.strptime(min(sortie_dates), utils.json_date_format).date() + datetime.datetime.strptime(min(sortie_dates), utils.json_date_format).date(), ) @endpoint( @@ -1162,8 +1233,11 @@ class ToulouseAxel(BaseResource): 'activity_type': {'description': _('Activity type (MAT, MIDI, SOIR, GARD)')}, 'start_date': {'description': _('Start date of the period')}, 'end_date': {'description': _('End date of the period')}, - }) - def clae_booking_activity_possible_days(self, request, NameID, idpersonne, activity_type, start_date, end_date): + }, + ) + def clae_booking_activity_possible_days( + self, request, NameID, idpersonne, activity_type, start_date, end_date + ): link = self.get_link(NameID) try: start_date = datetime.datetime.strptime(start_date, utils.json_date_format).date() @@ -1171,7 +1245,11 @@ class ToulouseAxel(BaseResource): except ValueError: raise APIError('bad date format, should be YYYY-MM-DD', err_code='bad-request', http_status=400) if activity_type not in ['MAT', 'MIDI', 'SOIR', 'GARD']: - raise APIError('bad activity_type, should be MAT, MIDI, SOIR or GARD', err_code='bad-request', http_status=400) + raise APIError( + 'bad activity_type, should be MAT, MIDI, SOIR or GARD', + err_code='bad-request', + http_status=400, + ) today = datetime.date.today() # be sure that 
start_date is after today + 8 days @@ -1181,7 +1259,8 @@ class ToulouseAxel(BaseResource): # and end_date is before smallest DATESORTIE reference_year = utils.get_reference_year_from_date(start_date) possible_days_min, possible_days_max = self.get_min_and_max_possible_days( - dui=link.dui, reference_year=reference_year, child_id=idpersonne) + dui=link.dui, reference_year=reference_year, child_id=idpersonne + ) if possible_days_min and possible_days_max: start_date = max(start_date, possible_days_min) end_date = min(end_date, possible_days_max) @@ -1193,7 +1272,8 @@ class ToulouseAxel(BaseResource): def get_activity_days_for_week(week_start_date, week_end_date): # ask Axel for the booking of a week (starts may be a monday, ends a friday) activities = self.get_booking_data( - dui=link.dui, child_id=idpersonne, booking_date=week_start_date).get('ACTIVITE', []) + dui=link.dui, child_id=idpersonne, booking_date=week_start_date + ).get('ACTIVITE', []) activity = None for act in activities: if act['TYPEACTIVITE'] == activity_type: @@ -1205,7 +1285,9 @@ class ToulouseAxel(BaseResource): while day_date <= week_end_date: day = WEEKDAYS[day_date.weekday()] activity_day = { - 'id': '{}:{}:{}:{}'.format(idpersonne, activity_type, activity['id'], day_date.strftime(utils.json_date_format)), + 'id': '{}:{}:{}:{}'.format( + idpersonne, activity_type, activity['id'], day_date.strftime(utils.json_date_format) + ), 'text': dateformat.format(day_date, 'l j F Y'), 'disabled': activity['booking']['days'][day] is None, 'prefill': activity['booking']['days'][day], @@ -1219,7 +1301,9 @@ class ToulouseAxel(BaseResource): activity_days = [] # cross all weeks until end date while week_end_date <= end_date: - activity_days += [d for d in get_activity_days_for_week(max(start_date, week_start_date), week_end_date)] + activity_days += [ + d for d in get_activity_days_for_week(max(start_date, week_start_date), week_end_date) + ] if week_end_date == end_date: break week_start_date = week_start_date 
+ datetime.timedelta(days=7) @@ -1237,21 +1321,27 @@ class ToulouseAxel(BaseResource): 'idpersonne': {'description': _('Child ID')}, 'activity_type': {'description': _('Activity type (MAT, MIDI, SOIR, GARD)')}, 'booking_date': {'description': _('Booking date (to get reference year)')}, - }) - def clae_booking_activity_annual_possible_days(self, request, NameID, idpersonne, activity_type, booking_date): + }, + ) + def clae_booking_activity_annual_possible_days( + self, request, NameID, idpersonne, activity_type, booking_date + ): link = self.get_link(NameID) try: booking_date = datetime.datetime.strptime(booking_date, utils.json_date_format).date() except ValueError: raise APIError('bad date format, should be YYYY-MM-DD', err_code='bad-request', http_status=400) if activity_type not in ['MAT', 'MIDI', 'SOIR', 'GARD']: - raise APIError('bad activity_type, should be MAT, MIDI, SOIR or GARD', err_code='bad-request', http_status=400) + raise APIError( + 'bad activity_type, should be MAT, MIDI, SOIR or GARD', + err_code='bad-request', + http_status=400, + ) reference_year = utils.get_reference_year_from_date(booking_date) activities = self.get_child_activities( - dui=link.dui, - reference_year=reference_year, - child_id=idpersonne).get('ACTIVITE', []) + dui=link.dui, reference_year=reference_year, child_id=idpersonne + ).get('ACTIVITE', []) activity = None for act in activities: @@ -1267,11 +1357,13 @@ class ToulouseAxel(BaseResource): disabled = True elif activity_type == 'SOIR' and day == 'wednesday': disabled = True - activity_days.append({ - 'id': '{}:{}:{}:{}'.format(idpersonne, activity_type, activity['IDACTIVITE'], day), - 'text': WEEKDAYS_LABELS[i], - 'disabled': disabled, - }) + activity_days.append( + { + 'id': '{}:{}:{}:{}'.format(idpersonne, activity_type, activity['IDACTIVITE'], day), + 'text': WEEKDAYS_LABELS[i], + 'disabled': disabled, + } + ) return {'data': activity_days} @@ -1286,9 +1378,12 @@ class ToulouseAxel(BaseResource): 'activity_type': 
{'description': _('Activity type (MAT, MIDI, SOIR, GARD)')}, 'start_date': {'description': _('Start date of the period')}, 'end_date': {'description': _('End date of the period')}, - }) + }, + ) def clae_booking_activity_prefill(self, request, NameID, idpersonne, activity_type, start_date, end_date): - possible_days = self.clae_booking_activity_possible_days(request, NameID, idpersonne, activity_type, start_date, end_date) + possible_days = self.clae_booking_activity_possible_days( + request, NameID, idpersonne, activity_type, start_date, end_date + ) return {'data': [d['id'] for d in possible_days['data'] if d['prefill'] is True]} @endpoint( @@ -1305,27 +1400,42 @@ class ToulouseAxel(BaseResource): 'application/json': schemas.BOOKING_SCHEMA, } } - }) + }, + ) def clae_booking(self, request, NameID, post_data): link = self.get_link(NameID) # check dates today = datetime.date.today() start_date_min = today + datetime.timedelta(days=8) - start_date = datetime.datetime.strptime(post_data['booking_start_date'], utils.json_date_format).date() + start_date = datetime.datetime.strptime( + post_data['booking_start_date'], utils.json_date_format + ).date() reference_year = utils.get_reference_year_from_date(start_date) end_date_max = datetime.date(reference_year + 1, 7, 31) end_date = datetime.datetime.strptime(post_data['booking_end_date'], utils.json_date_format).date() if start_date > end_date: - raise APIError('booking_start_date should be before booking_end_date', err_code='bad-request', http_status=400) + raise APIError( + 'booking_start_date should be before booking_end_date', + err_code='bad-request', + http_status=400, + ) if start_date < start_date_min: - raise APIError('booking_start_date min value: %s' % start_date_min, err_code='bad-request', http_status=400) + raise APIError( + 'booking_start_date min value: %s' % start_date_min, err_code='bad-request', http_status=400 + ) if end_date > end_date_max: - raise APIError('booking_end_date max value: %s' % 
end_date_max, err_code='bad-request', http_status=400) + raise APIError( + 'booking_end_date max value: %s' % end_date_max, err_code='bad-request', http_status=400 + ) # get known activities for this child, to have the ids - child_activities_info = self.get_child_activities(dui=link.dui, reference_year=reference_year, child_id=post_data['child_id']) - child_known_activities_by_type = {a['TYPEACTIVITE']: a for a in child_activities_info.get('ACTIVITE', [])} + child_activities_info = self.get_child_activities( + dui=link.dui, reference_year=reference_year, child_id=post_data['child_id'] + ) + child_known_activities_by_type = { + a['TYPEACTIVITE']: a for a in child_activities_info.get('ACTIVITE', []) + } # build activity list to post activities_by_type = {} @@ -1347,9 +1457,16 @@ class ToulouseAxel(BaseResource): day_date = week_start_date # cross days of the week to find bookings while day_date <= week_end_date: - key = '{}:{}:{}:{}'.format(post_data['child_id'], activity_type, activity_id, day_date.strftime(utils.json_date_format)) + key = '{}:{}:{}:{}'.format( + post_data['child_id'], + activity_type, + activity_id, + day_date.strftime(utils.json_date_format), + ) if key in post_data['booking_list_%s' % activity_type]: - week_pattern = week_pattern[:day_date.weekday()] + '1' + week_pattern[day_date.weekday() + 1:] + week_pattern = ( + week_pattern[: day_date.weekday()] + '1' + week_pattern[day_date.weekday() + 1 :] + ) day_date = day_date + datetime.timedelta(days=1) return week_pattern @@ -1364,11 +1481,13 @@ class ToulouseAxel(BaseResource): booking_dates.add(real_start_date) activity_id = activity['IDACTIVITE'] week_pattern = get_week_pattern(real_start_date, real_end_date, activity_type, activity_id) - activity['PERIODE'].append({ - 'DATEDEBUT': real_start_date.strftime(utils.json_date_format), - 'DATEDFIN': real_end_date.strftime(utils.json_date_format), - 'SEMAINETYPE': week_pattern, - }) + activity['PERIODE'].append( + { + 'DATEDEBUT': 
real_start_date.strftime(utils.json_date_format), + 'DATEDFIN': real_end_date.strftime(utils.json_date_format), + 'SEMAINETYPE': week_pattern, + } + ) week_start_date = week_start_date + datetime.timedelta(days=7) week_end_date = week_end_date + datetime.timedelta(days=7) @@ -1381,7 +1500,7 @@ class ToulouseAxel(BaseResource): 'IDPERSONNE': post_data['child_id'], 'REGIME': post_data.get('regime') or child_activities_info['REGIME'], } - ] + ], } for activity_type in ['MAT', 'MIDI', 'SOIR', 'GARD']: if activity_type in activities_by_type: @@ -1395,8 +1514,8 @@ class ToulouseAxel(BaseResource): raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) # invalidate caches # invalidate get_children_activities cache @@ -1405,7 +1524,12 @@ class ToulouseAxel(BaseResource): for booking_date in sorted(booking_dates): # invalidate get_booking_data cache for each week crossed start_date, end_date = utils.get_week_dates_from_date(booking_date) - cache_key = 'toulouse-axel-%s-booking-data-%s-%s-%s' % (self.pk, link.dui, post_data['child_id'], start_date.isoformat()) + cache_key = 'toulouse-axel-%s-booking-data-%s-%s-%s' % ( + self.pk, + link.dui, + post_data['child_id'], + start_date.isoformat(), + ) cache.delete(cache_key) return { @@ -1413,7 +1537,7 @@ class ToulouseAxel(BaseResource): 'data': { 'xml_request': result.xml_request, 'xml_response': result.xml_response, - } + }, } @endpoint( @@ -1430,7 +1554,8 @@ class ToulouseAxel(BaseResource): 'application/json': schemas.ANNUAL_BOOKING_SCHEMA, } } - }) + }, + ) def clae_booking_annual(self, request, NameID, post_data): link = self.get_link(NameID) @@ -1440,11 +1565,13 @@ class ToulouseAxel(BaseResource): start_date = datetime.datetime.strptime(post_data['booking_date'], utils.json_date_format).date() start_date = max(start_date, start_date_min) reference_year = 
utils.get_reference_year_from_date(start_date) - end_date = datetime.date(reference_year+1, 7, 31) + end_date = datetime.date(reference_year + 1, 7, 31) # get known activities for this child, to have the ids child_activities_info = self.get_child_activities(link.dui, reference_year, post_data['child_id']) - child_known_activities_by_type = {a['TYPEACTIVITE']: a for a in child_activities_info.get('ACTIVITE', [])} + child_known_activities_by_type = { + a['TYPEACTIVITE']: a for a in child_activities_info.get('ACTIVITE', []) + } # build activity list to post activities_by_type = {} @@ -1464,11 +1591,13 @@ class ToulouseAxel(BaseResource): activities_by_type[activity_type] = { 'IDACTIVITE': activity_id, 'ANNEEREFERENCE': str(reference_year), - 'PERIODE': [{ - 'DATEDEBUT': start_date.strftime(utils.json_date_format), - 'DATEDFIN': end_date.strftime(utils.json_date_format), - 'SEMAINETYPE': week_pattern, - }], + 'PERIODE': [ + { + 'DATEDEBUT': start_date.strftime(utils.json_date_format), + 'DATEDFIN': end_date.strftime(utils.json_date_format), + 'SEMAINETYPE': week_pattern, + } + ], } # build data @@ -1480,7 +1609,7 @@ class ToulouseAxel(BaseResource): 'IDPERSONNE': post_data['child_id'], 'REGIME': post_data.get('regime') or child_activities_info['REGIME'], } - ] + ], } for activity_type in ['MAT', 'MIDI', 'SOIR', 'GARD']: if activity_type in activities_by_type: @@ -1494,8 +1623,8 @@ class ToulouseAxel(BaseResource): raise APIError( 'Axel error: %s' % e, err_code='error', - data={'xml_request': e.xml_request, - 'xml_response': e.xml_response}) + data={'xml_request': e.xml_request, 'xml_response': e.xml_response}, + ) # invalidate cache # invalidate get_children_activities cache @@ -1504,7 +1633,12 @@ class ToulouseAxel(BaseResource): booking_date = utils.get_week_dates_from_date(start_date)[0] while booking_date <= end_date: # invalidate get_booking_data cache for each monday from now to the end of the reference year - cache_key = 'toulouse-axel-%s-booking-data-%s-%s-%s' 
% (self.pk, link.dui, post_data['child_id'], booking_date.isoformat()) + cache_key = 'toulouse-axel-%s-booking-data-%s-%s-%s' % ( + self.pk, + link.dui, + post_data['child_id'], + booking_date.isoformat(), + ) cache.delete(cache_key) booking_date += datetime.timedelta(days=7) @@ -1513,7 +1647,7 @@ class ToulouseAxel(BaseResource): 'data': { 'xml_request': result.xml_request, 'xml_response': result.xml_response, - } + }, } diff --git a/passerelle/contrib/toulouse_axel/schemas.py b/passerelle/contrib/toulouse_axel/schemas.py index 61253040..19da22aa 100644 --- a/passerelle/contrib/toulouse_axel/schemas.py +++ b/passerelle/contrib/toulouse_axel/schemas.py @@ -60,7 +60,9 @@ class AxelSchema(JSONSchemaFromXMLSchema): def decode_date(self, data): value = datetime.datetime.strptime(data.text, utils.xml_date_format).strftime(utils.json_date_format) - return xmlschema.ElementData(tag=data.tag, text=value, content=data.content, attributes=data.attributes) + return xmlschema.ElementData( + tag=data.tag, text=value, content=data.content, attributes=data.attributes + ) def decode_date_optional(self, data): if not data.text: @@ -78,7 +80,9 @@ class AxelSchema(JSONSchemaFromXMLSchema): value = False if data.text.lower() == 'oui': value = True - return xmlschema.ElementData(tag=data.tag, text=value, content=data.content, attributes=data.attributes) + return xmlschema.ElementData( + tag=data.tag, text=value, content=data.content, attributes=data.attributes + ) @classmethod def schema_bool_optional(cls): @@ -108,12 +112,13 @@ OperationResult = namedtuple('OperationResult', ['json_response', 'xml_request', class Operation(object): def __init__(self, operation, prefix='Dui/', request_root_element='PORTAIL'): self.operation = operation - self.request_converter = xml_schema_converter('%sQ_%s.xsd' % (prefix, operation), request_root_element) + self.request_converter = xml_schema_converter( + '%sQ_%s.xsd' % (prefix, operation), request_root_element + ) self.response_converter = 
xml_schema_converter('%sR_%s.xsd' % (prefix, operation), 'PORTAILSERVICE') self.name = re.sub( - '(.?)([A-Z])', - lambda s: s.group(1) + ('-' if s.group(1) else '') + s.group(2).lower(), - operation) + '(.?)([A-Z])', lambda s: s.group(1) + ('-' if s.group(1) else '') + s.group(2).lower(), operation + ) self.snake_name = self.name.replace('-', '_') @property @@ -137,36 +142,29 @@ class Operation(object): try: self.request_converter.xml_schema.validate(serialized_request) except xmlschema.XMLSchemaValidationError as e: - raise AxelError( - 'invalid request %s' % str(e), - xml_request=serialized_request) + raise AxelError('invalid request %s' % str(e), xml_request=serialized_request) result = client.service.getData( - self.operation, - serialized_request, - '') # FIXME: What is the user parameter for ? + self.operation, serialized_request, '' + ) # FIXME: What is the user parameter for ? xml_result = ET.fromstring(result.encode('utf-8')) utils.indent(xml_result) pretty_result = force_text(ET.tostring(xml_result)) if xml_result.find('RESULTAT/STATUS').text != 'OK': msg = xml_result.find('RESULTAT/COMMENTAIRES').text - raise AxelError( - msg, - xml_request=serialized_request, - xml_response=pretty_result) + raise AxelError(msg, xml_request=serialized_request, xml_response=pretty_result) try: return OperationResult( json_response=self.response_converter.decode(xml_result), xml_request=serialized_request, - xml_response=pretty_result + xml_response=pretty_result, ) except xmlschema.XMLSchemaValidationError as e: raise AxelError( - 'invalid response %s' % str(e), - xml_request=serialized_request, - xml_response=pretty_result) + 'invalid response %s' % str(e), xml_request=serialized_request, xml_response=pretty_result + ) ref_date_gestion_dui = Operation('RefDateGestionDui') @@ -188,9 +186,9 @@ PAYMENT_SCHEMA = { 'transaction_date': copy.deepcopy(utils.datetime_type), 'transaction_id': { 'type': 'string', - } + }, }, - 'required': ['transaction_date', 'transaction_id'] + 
'required': ['transaction_date', 'transaction_id'], } LINK_SCHEMA = copy.deepcopy(ref_verif_dui.request_schema['properties']['PORTAIL']['properties']['DUI']) @@ -212,20 +210,23 @@ UPDATE_FAMILY_REQUIRED_FLAGS = [ 'maj:revenus', ] for i in range(0, 6): - UPDATE_FAMILY_FLAGS.update({ - 'maj:enfant_%s' % i: 'ENFANT/%s' % i, - 'maj:enfant_%s_sanitaire' % i: 'ENFANT/%s/SANITAIRE' % i, - 'maj:enfant_%s_sanitaire_medecin' % i: 'ENFANT/%s/SANITAIRE/MEDECIN' % i, - 'maj:enfant_%s_sanitaire_vaccin' % i: 'ENFANT/%s/SANITAIRE/VACCIN' % i, - 'maj:enfant_%s_sanitaire_allergie' % i: 'ENFANT/%s/SANITAIRE/ALLERGIE' % i, - 'maj:enfant_%s_sanitaire_handicap' % i: 'ENFANT/%s/SANITAIRE/HANDICAP' % i, - 'maj:enfant_%s_assurance' % i: 'ENFANT/%s/ASSURANCE' % i, - 'maj:enfant_%s_contact' % i: 'ENFANT/%s/CONTACT' % i, - }) + UPDATE_FAMILY_FLAGS.update( + { + 'maj:enfant_%s' % i: 'ENFANT/%s' % i, + 'maj:enfant_%s_sanitaire' % i: 'ENFANT/%s/SANITAIRE' % i, + 'maj:enfant_%s_sanitaire_medecin' % i: 'ENFANT/%s/SANITAIRE/MEDECIN' % i, + 'maj:enfant_%s_sanitaire_vaccin' % i: 'ENFANT/%s/SANITAIRE/VACCIN' % i, + 'maj:enfant_%s_sanitaire_allergie' % i: 'ENFANT/%s/SANITAIRE/ALLERGIE' % i, + 'maj:enfant_%s_sanitaire_handicap' % i: 'ENFANT/%s/SANITAIRE/HANDICAP' % i, + 'maj:enfant_%s_assurance' % i: 'ENFANT/%s/ASSURANCE' % i, + 'maj:enfant_%s_contact' % i: 'ENFANT/%s/CONTACT' % i, + } + ) UPDATE_FAMILY_REQUIRED_FLAGS.append('maj:enfant_%s' % i) UPDATE_FAMILY_SCHEMA = copy.deepcopy( - form_maj_famille_dui.request_schema['properties']['PORTAIL']['properties']['DUI']) + form_maj_famille_dui.request_schema['properties']['PORTAIL']['properties']['DUI'] +) for flag in sorted(UPDATE_FAMILY_FLAGS.keys()): flag_type = copy.deepcopy(utils.boolean_type) @@ -257,8 +258,12 @@ handicap_fields = [ 'INDICATEURHANDICAP', 'INDICATEURNOTIFMDPH', ] -sanitaire_properties = UPDATE_FAMILY_SCHEMA['properties']['ENFANT']['items']['properties']['SANITAIRE']['properties'] -sanitaire_required = 
UPDATE_FAMILY_SCHEMA['properties']['ENFANT']['items']['properties']['SANITAIRE']['required'] +sanitaire_properties = UPDATE_FAMILY_SCHEMA['properties']['ENFANT']['items']['properties']['SANITAIRE'][ + 'properties' +] +sanitaire_required = UPDATE_FAMILY_SCHEMA['properties']['ENFANT']['items']['properties']['SANITAIRE'][ + 'required' +] sanitaire_properties['HANDICAP'] = { 'type': 'object', 'properties': {}, @@ -289,7 +294,7 @@ sanitaire_properties['ALLERGIE']['properties']['AUTRES'] = { 'type': 'string', 'minLength': 0, 'maxLength': 50, - } + }, ] } @@ -308,8 +313,8 @@ BOOKING_SCHEMA = { 'items': { 'type': 'string', 'pattern': '[A-Za-z0-9]+:MAT:[A-Za-z0-9]+:[0-9]{4}-[0-9]{2}-[0-9]{2}', - } - } + }, + }, ] }, 'booking_list_MIDI': { @@ -320,8 +325,8 @@ BOOKING_SCHEMA = { 'items': { 'type': 'string', 'pattern': '[A-Za-z0-9]+:MIDI:[A-Za-z0-9]+:[0-9]{4}-[0-9]{2}-[0-9]{2}', - } - } + }, + }, ] }, 'booking_list_SOIR': { @@ -332,8 +337,8 @@ BOOKING_SCHEMA = { 'items': { 'type': 'string', 'pattern': '[A-Za-z0-9]+:SOIR:[A-Za-z0-9]+:[0-9]{4}-[0-9]{2}-[0-9]{2}', - } - } + }, + }, ] }, 'booking_list_GARD': { @@ -344,8 +349,8 @@ BOOKING_SCHEMA = { 'items': { 'type': 'string', 'pattern': '[A-Za-z0-9]+:GARD:[A-Za-z0-9]+:[0-9]{4}-[0-9]{2}-[0-9]{2}', - } - } + }, + }, ] }, 'child_id': { @@ -353,14 +358,17 @@ BOOKING_SCHEMA = { 'minLength': 1, 'maxLength': 8, }, - 'regime': { - 'oneOf': [ - {'type': 'null'}, - {'type': 'string', 'enum': ['', 'SV', 'AV']} - ] - } + 'regime': {'oneOf': [{'type': 'null'}, {'type': 'string', 'enum': ['', 'SV', 'AV']}]}, }, - 'required': ['booking_start_date', 'booking_end_date', 'booking_list_MAT', 'booking_list_MIDI', 'booking_list_SOIR', 'booking_list_GARD', 'child_id'] + 'required': [ + 'booking_start_date', + 'booking_end_date', + 'booking_list_MAT', + 'booking_list_MIDI', + 'booking_list_SOIR', + 'booking_list_GARD', + 'child_id', + ], } @@ -375,8 +383,8 @@ ANNUAL_BOOKING_SCHEMA = { 'items': { 'type': 'string', 'pattern': 
'[A-Za-z0-9]+:MAT:[A-Za-z0-9]+:(monday|tuesday|wednesday|thursday|friday)', - } - } + }, + }, ] }, 'booking_list_MIDI': { @@ -387,8 +395,8 @@ ANNUAL_BOOKING_SCHEMA = { 'items': { 'type': 'string', 'pattern': '[A-Za-z0-9]+:MIDI:[A-Za-z0-9]+:(monday|tuesday|wednesday|thursday|friday)', - } - } + }, + }, ] }, 'booking_list_SOIR': { @@ -399,8 +407,8 @@ ANNUAL_BOOKING_SCHEMA = { 'items': { 'type': 'string', 'pattern': '[A-Za-z0-9]+:SOIR:[A-Za-z0-9]+:(monday|tuesday|wednesday|thursday|friday)', - } - } + }, + }, ] }, 'booking_list_GARD': { @@ -411,8 +419,8 @@ ANNUAL_BOOKING_SCHEMA = { 'items': { 'type': 'string', 'pattern': '[A-Za-z0-9]+:GARD:[A-Za-z0-9]+:(monday|tuesday|wednesday|thursday|friday)', - } - } + }, + }, ] }, 'child_id': { @@ -420,13 +428,15 @@ ANNUAL_BOOKING_SCHEMA = { 'minLength': 1, 'maxLength': 8, }, - 'regime': { - 'oneOf': [ - {'type': 'null'}, - {'type': 'string', 'enum': ['', 'SV', 'AV']} - ] - }, - 'booking_date': copy.deepcopy(utils.date_type) + 'regime': {'oneOf': [{'type': 'null'}, {'type': 'string', 'enum': ['', 'SV', 'AV']}]}, + 'booking_date': copy.deepcopy(utils.date_type), }, - 'required': ['booking_list_MAT', 'booking_list_MIDI', 'booking_list_SOIR', 'booking_list_GARD', 'child_id', 'booking_date'] + 'required': [ + 'booking_list_MAT', + 'booking_list_MIDI', + 'booking_list_SOIR', + 'booking_list_GARD', + 'child_id', + 'booking_date', + ], } diff --git a/passerelle/contrib/toulouse_axel/utils.py b/passerelle/contrib/toulouse_axel/utils.py index 26fe1c26..f8c8fa9d 100644 --- a/passerelle/contrib/toulouse_axel/utils.py +++ b/passerelle/contrib/toulouse_axel/utils.py @@ -35,7 +35,7 @@ boolean_type = { { 'type': 'string', 'pattern': '[Oo][Uu][Ii]|[Nn][Oo][Nn]|[Tt][Rr][Uu][Ee]|[Ff][Aa][Ll][Ss][Ee]|1|0', - } + }, ] } date_type = { @@ -52,51 +52,61 @@ xml_date_format = '%d/%m/%Y' xml_datetime_format = '%d/%m/%Y %H:%M:%S' -situation_familiale_mapping = OrderedDict([ - ('C', 'Célibataire'), - ('D', 'Divorcé (e)'), - ('M', 'Marié (e)'), - ('S', 
'Séparé (e)'), - ('V', 'Veuf (ve)'), - ('VM', 'Vie maritale'), - ('P', 'Pacs'), -]) +situation_familiale_mapping = OrderedDict( + [ + ('C', 'Célibataire'), + ('D', 'Divorcé (e)'), + ('M', 'Marié (e)'), + ('S', 'Séparé (e)'), + ('V', 'Veuf (ve)'), + ('VM', 'Vie maritale'), + ('P', 'Pacs'), + ] +) -csp_mapping = OrderedDict([ - ('OUV', 'Ouvriers'), - ('EMP', 'Employés'), - ('ETU', 'Etudiants'), - ('RET', 'Retraités'), - ('STA', 'Personnes en stage'), - ('AGEX', 'Agriculteurs exploitants'), - ('ARCO', 'Artisans commercants chefs entreprise'), - ('CADP', 'Cadres professions intellectuelles supérieures'), - ('PRIN', 'Professions intermediaires'), - ('SACT', 'Autres personnes sans activité professionnelle'), - ('REC', 'Recherche emploi'), -]) +csp_mapping = OrderedDict( + [ + ('OUV', 'Ouvriers'), + ('EMP', 'Employés'), + ('ETU', 'Etudiants'), + ('RET', 'Retraités'), + ('STA', 'Personnes en stage'), + ('AGEX', 'Agriculteurs exploitants'), + ('ARCO', 'Artisans commercants chefs entreprise'), + ('CADP', 'Cadres professions intellectuelles supérieures'), + ('PRIN', 'Professions intermediaires'), + ('SACT', 'Autres personnes sans activité professionnelle'), + ('REC', 'Recherche emploi'), + ] +) -lien_parente_mapping = OrderedDict([ - ('GRP1', 'Grands-parents paternels'), - ('GRP2', 'Grands-parents maternels'), - ('VOI', 'Voisin'), - ('FRE', "Frère de l'enfant"), - ('SOE', "Soeur de l'enfant"), - ('AMI', 'Ami (e)'), - ('FAMI', 'Membre de la famille'), - ('BABY', 'Baby sitter'), -]) +lien_parente_mapping = OrderedDict( + [ + ('GRP1', 'Grands-parents paternels'), + ('GRP2', 'Grands-parents maternels'), + ('VOI', 'Voisin'), + ('FRE', "Frère de l'enfant"), + ('SOE', "Soeur de l'enfant"), + ('AMI', 'Ami (e)'), + ('FAMI', 'Membre de la famille'), + ('BABY', 'Baby sitter'), + ] +) -type_regime_mapping = OrderedDict([ - ('GENE', 'Régime général'), - ('ZAU', 'Autre'), - ('MSA', 'MSA'), -]) +type_regime_mapping = OrderedDict( + [ + ('GENE', 'Régime général'), + ('ZAU', 'Autre'), + 
('MSA', 'MSA'), + ] +) -regime_mapping = OrderedDict([ - ('AV', 'Menu avec viande'), - ('SV', 'Menu sans viande'), -]) +regime_mapping = OrderedDict( + [ + ('AV', 'Menu avec viande'), + ('SV', 'Menu sans viande'), + ] +) def get_label(mapping, code): @@ -186,24 +196,28 @@ def normalize_invoice(invoice, dui, historical=False, vendor_base=None): 'vendor': {'toulouse-axel': vendor}, } if historical: - data.update({ - 'amount': 0, - 'total_amount': invoice['MONTANT'], - 'created': invoice['EMISSION'], - 'pay_limit_date': '', - 'online_payment': False, - 'has_pdf': invoice['IPDF'] == 'O', - }) + data.update( + { + 'amount': 0, + 'total_amount': invoice['MONTANT'], + 'created': invoice['EMISSION'], + 'pay_limit_date': '', + 'online_payment': False, + 'has_pdf': invoice['IPDF'] == 'O', + } + ) else: - data.update({ - 'amount': invoice['RESTEAPAYER'], - 'total_amount': invoice['MONTANTTOTAL'], - 'amount_paid': max(0, invoice['MONTANTTOTAL'] - invoice['RESTEAPAYER']) or '', - 'created': invoice['DATEEMISSION'], - 'pay_limit_date': invoice['DATEECHEANCE'], - 'online_payment': bool(invoice['RESTEAPAYER'] > 0), - 'has_pdf': invoice['EXISTEPDF'] == '1', - }) + data.update( + { + 'amount': invoice['RESTEAPAYER'], + 'total_amount': invoice['MONTANTTOTAL'], + 'amount_paid': max(0, invoice['MONTANTTOTAL'] - invoice['RESTEAPAYER']) or '', + 'created': invoice['DATEEMISSION'], + 'pay_limit_date': invoice['DATEECHEANCE'], + 'online_payment': bool(invoice['RESTEAPAYER'] > 0), + 'has_pdf': invoice['EXISTEPDF'] == '1', + } + ) return data @@ -217,10 +231,7 @@ def get_reference_year_from_date(booking_date): def get_week_dates_from_date(booking_date): day = booking_date.weekday() # return monday and friday of the week - return ( - booking_date - datetime.timedelta(days=day), - booking_date + datetime.timedelta(days=4 - day) - ) + return (booking_date - datetime.timedelta(days=day), booking_date + datetime.timedelta(days=4 - day)) def get_booking(value): diff --git a/passerelle/plugins.py 
b/passerelle/plugins.py index f26d93e9..64bfb7f6 100644 --- a/passerelle/plugins.py +++ b/passerelle/plugins.py @@ -21,9 +21,9 @@ from .urls_utils import decorated_includes, required, app_enabled, manager_requi def register_apps_urls(urlpatterns): - '''Call get_before_urls and get_after_urls on all apps providing them, - add those urls to the given urlpatterns (before or after). - ''' + """Call get_before_urls and get_after_urls on all apps providing them, + add those urls to the given urlpatterns (before or after). + """ before_urls = [] after_urls = [] for app in apps.get_app_configs(): diff --git a/passerelle/settings.py b/passerelle/settings.py index 60be4a0f..d90f3223 100644 --- a/passerelle/settings.py +++ b/passerelle/settings.py @@ -12,6 +12,7 @@ except ImportError: def emit(self, record): pass + logging.getLogger('passerelle').addHandler(NullHandler()) # Build paths inside the project like this: os.path.join(BASE_DIR, ...) @@ -63,8 +64,7 @@ STATICFILES_DIRS = (os.path.join(BASE_DIR, 'passerelle', 'static'),) # List of finder classes that know how to find static files in # various locations. 
-STATICFILES_FINDERS = list(global_settings.STATICFILES_FINDERS) + \ - ['gadjo.finders.XStaticFinder'] +STATICFILES_FINDERS = list(global_settings.STATICFILES_FINDERS) + ['gadjo.finders.XStaticFinder'] MIDDLEWARE = ( 'django.contrib.sessions.middleware.SessionMiddleware', @@ -88,9 +88,7 @@ LANGUAGE_CODE = 'fr-fr' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [ - os.path.join(BASE_DIR, 'passerelle', 'templates') - ], + 'DIRS': [os.path.join(BASE_DIR, 'passerelle', 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ @@ -217,14 +215,12 @@ REQUESTS_PROXIES = None REQUESTS_TIMEOUT = 25 # Passerelle can receive big requests (for example base64 encoded files) -DATA_UPLOAD_MAX_MEMORY_SIZE = 100*1024*1024 +DATA_UPLOAD_MAX_MEMORY_SIZE = 100 * 1024 * 1024 SITE_BASE_URL = 'http://localhost' # List of passerelle.utils.Request response Content-Type to log -LOGGED_CONTENT_TYPES_MESSAGES = ( - r'text/', r'application/(json|xml)' -) +LOGGED_CONTENT_TYPES_MESSAGES = (r'text/', r'application/(json|xml)') # Max size of the response to log LOGGED_RESPONSES_MAX_SIZE = 5000 @@ -243,7 +239,7 @@ LOGGING = { 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', - }, + }, }, 'loggers': { 'django.request': { @@ -259,7 +255,8 @@ LOGGING = { }, } -local_settings_file = os.environ.get('PASSERELLE_SETTINGS_FILE', - os.path.join(os.path.dirname(__file__), 'local_settings.py')) +local_settings_file = os.environ.get( + 'PASSERELLE_SETTINGS_FILE', os.path.join(os.path.dirname(__file__), 'local_settings.py') +) if os.path.exists(local_settings_file): exec(open(local_settings_file).read()) diff --git a/passerelle/sms/forms.py b/passerelle/sms/forms.py index dba06a09..f0baa355 100644 --- a/passerelle/sms/forms.py +++ b/passerelle/sms/forms.py @@ -1,6 +1,7 @@ from django import forms from django.utils.translation import ugettext_lazy as _ + class SmsTestSendForm(forms.Form): number = forms.CharField(label=_('To'), max_length=12) 
sender = forms.CharField(label=_('From'), max_length=12) diff --git a/passerelle/sms/migrations/0001_initial.py b/passerelle/sms/migrations/0001_initial.py index 6efa306f..86ef962e 100644 --- a/passerelle/sms/migrations/0001_initial.py +++ b/passerelle/sms/migrations/0001_initial.py @@ -19,7 +19,12 @@ class Migration(migrations.Migration): migrations.CreateModel( name='SMSLog', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name='ID' + ), + ), ('timestamp', models.DateTimeField(auto_now_add=True)), ('appname', models.CharField(max_length=128, null=True, verbose_name='appname')), ('slug', models.CharField(max_length=128, null=True, verbose_name='slug')), diff --git a/passerelle/sms/models.py b/passerelle/sms/models.py index 2461a249..9de09e26 100644 --- a/passerelle/sms/models.py +++ b/passerelle/sms/models.py @@ -42,25 +42,26 @@ SEND_SCHEMA = { 'to': { 'description': 'Destination numbers', "type": "array", - "items": { - 'type': 'string', - 'pattern': r'^\+?[-.\s/\d]+$' - }, + "items": {'type': 'string', 'pattern': r'^\+?[-.\s/\d]+$'}, }, - } + }, } class SMSResource(BaseResource): category = _('SMS Providers') - documentation_url = 'https://doc-publik.entrouvert.com/admin-fonctionnel/les-tutos/configuration-envoi-sms/' + documentation_url = ( + 'https://doc-publik.entrouvert.com/admin-fonctionnel/les-tutos/configuration-envoi-sms/' + ) _can_send_messages_description = _('Sending messages is limited to the following API users:') - default_country_code = models.CharField(verbose_name=_('Default country code'), max_length=3, - default=u'33') - default_trunk_prefix = models.CharField(verbose_name=_('Default trunk prefix'), max_length=2, - default=u'0') # Yeah France first ! 
+ default_country_code = models.CharField( + verbose_name=_('Default country code'), max_length=3, default=u'33' + ) + default_trunk_prefix = models.CharField( + verbose_name=_('Default trunk prefix'), max_length=2, default=u'0' + ) # Yeah France first ! # FIXME: add regexp field, to check destination and from format max_message_length = models.IntegerField(_('Maximum message length'), default=160) @@ -83,25 +84,34 @@ class SMSResource(BaseResource): # assumes 00 is international access code, remove it pass elif number.startswith(self.default_trunk_prefix): - number = '00' + self.default_country_code + number[len(self.default_trunk_prefix):] + number = '00' + self.default_country_code + number[len(self.default_trunk_prefix) :] else: - raise APIError('phone number %r is unsupported (no international prefix, ' - 'no local trunk prefix)' % number) + raise APIError( + 'phone number %r is unsupported (no international prefix, ' + 'no local trunk prefix)' % number + ) numbers.append(number) return numbers - @endpoint(perm='can_send_messages', methods=['post'], - description=_('Send a SMS message'), - parameters={'nostop': {'description': _('Do not send STOP instruction'), 'example_value': '1'}}, - post={'request_body': {'schema': {'application/json': SEND_SCHEMA}}}) + @endpoint( + perm='can_send_messages', + methods=['post'], + description=_('Send a SMS message'), + parameters={'nostop': {'description': _('Do not send STOP instruction'), 'example_value': '1'}}, + post={'request_body': {'schema': {'application/json': SEND_SCHEMA}}}, + ) def send(self, request, post_data, nostop=None): - post_data['message'] = post_data['message'][:self.max_message_length] + post_data['message'] = post_data['message'][: self.max_message_length] post_data['to'] = self.clean_numbers(post_data['to']) logging.info('sending SMS to %r from %r', post_data['to'], post_data['from']) stop = nostop is None # ?nostop in not in query string - self.add_job('send_job', - text=post_data['message'], 
sender=post_data['from'], destinations=post_data['to'], - stop=stop) + self.add_job( + 'send_job', + text=post_data['message'], + sender=post_data['from'], + destinations=post_data['to'], + stop=stop, + ) return {'err': 0} def send_job(self, *args, **kwargs): diff --git a/passerelle/sms/urls.py b/passerelle/sms/urls.py index c58df515..8d568511 100644 --- a/passerelle/sms/urls.py +++ b/passerelle/sms/urls.py @@ -3,6 +3,5 @@ from django.conf.urls import url from . import views management_urlpatterns = [ - url(r'^(?P[\w,-]+)/test-send/$', - views.SmsTestSendView.as_view(), name='sms-test-send'), + url(r'^(?P[\w,-]+)/test-send/$', views.SmsTestSendView.as_view(), name='sms-test-send'), ] diff --git a/passerelle/sms/views.py b/passerelle/sms/views.py index d4e4edad..3c34a4db 100644 --- a/passerelle/sms/views.py +++ b/passerelle/sms/views.py @@ -29,8 +29,7 @@ class SmsTestSendView(GenericConnectorMixin, FormView): connector = self.get_object() try: number = connector.clean_numbers([number])[0] - connector.send_msg( - text=message, sender=sender, destinations=[number], stop=False) + connector.send_msg(text=message, sender=sender, destinations=[number], stop=False) except APIError as exc: messages.error(self.request, _('Sending SMS fails: %s' % exc)) else: diff --git a/passerelle/soap.py b/passerelle/soap.py index 2d4bc1c9..5c011f9f 100644 --- a/passerelle/soap.py +++ b/passerelle/soap.py @@ -13,8 +13,7 @@ def client_to_jsondict(client): for p in sd.ports: d['ports'][p[0].name] = {} for m in p[1]: - d['ports'][p[0].name][m[0]] = dict( - (mp[0], sd.xlate(mp[1])) for mp in m[1]) + d['ports'][p[0].name][m[0]] = dict((mp[0], sd.xlate(mp[1])) for mp in m[1]) d['types'] = {} for t in sd.types: ft = client.factory.create(sd.xlate(t[0])) diff --git a/passerelle/urls.py b/passerelle/urls.py index d459d196..d3de0209 100644 --- a/passerelle/urls.py +++ b/passerelle/urls.py @@ -9,11 +9,21 @@ from django.views.static import serve as static_serve from .api.urls import urlpatterns as 
api_urls from .views import ( - HomePageView, ManageView, ManageAddView, - GenericCreateConnectorView, GenericDeleteConnectorView, - GenericEditConnectorView, GenericEndpointView, GenericConnectorView, - GenericViewLogsConnectorView, GenericLogView, GenericExportConnectorView, - login, logout, menu_json) + HomePageView, + ManageView, + ManageAddView, + GenericCreateConnectorView, + GenericDeleteConnectorView, + GenericEditConnectorView, + GenericEndpointView, + GenericConnectorView, + GenericViewLogsConnectorView, + GenericLogView, + GenericExportConnectorView, + login, + logout, + menu_json, +) from .base.views import GenericViewJobsConnectorView, GenericJobView, GenericRestartJobView from .urls_utils import decorated_includes, manager_required from .base.urls import access_urlpatterns, import_export_urlpatterns @@ -24,20 +34,19 @@ admin.autodiscover() urlpatterns = [ url(r'^$', HomePageView.as_view(), name='homepage'), - url(r'^manage/$', manager_required(ManageView.as_view()), name='manage-home'), url(r'^manage/menu.json$', manager_required(menu_json), name='menu-json'), url(r'^manage/add$', manager_required(ManageAddView.as_view()), name='add-connector'), - - url(r'^media/(?P.*)$', login_required(static_serve), { - 'document_root': settings.MEDIA_ROOT, - }), + url( + r'^media/(?P.*)$', + login_required(static_serve), + { + 'document_root': settings.MEDIA_ROOT, + }, + ), url(r'^admin/', admin.site.urls), - - url(r'^manage/access/', - decorated_includes(manager_required, include(access_urlpatterns))), - url(r'^manage/', - decorated_includes(manager_required, include(import_export_urlpatterns))), + url(r'^manage/access/', decorated_includes(manager_required, include(access_urlpatterns))), + url(r'^manage/', decorated_includes(manager_required, include(import_export_urlpatterns))), url('^api/', include(api_urls)), ] @@ -55,38 +64,71 @@ if 'mellon' in settings.INSTALLED_APPS: urlpatterns += [ - url(r'^manage/(?P[\w,-]+)/', decorated_includes(manager_required, - 
include([ - url(r'^add$', - GenericCreateConnectorView.as_view(), name='create-connector'), - url(r'^(?P[\w,-]+)/delete$', - GenericDeleteConnectorView.as_view(), name='delete-connector'), - url(r'^(?P[\w,-]+)/edit$', - GenericEditConnectorView.as_view(), name='edit-connector'), - url(r'^(?P[\w,-]+)/logs/$', - GenericViewLogsConnectorView.as_view(), name='view-logs-connector'), - url(r'^(?P[\w,-]+)/logs/(?P\d+)/$', - GenericLogView.as_view(), name='view-log'), - url(r'^(?P[\w,-]+)/jobs/$', - GenericViewJobsConnectorView.as_view(), name='view-jobs-connector'), - url(r'^(?P[\w,-]+)/jobs/(?P\d+)/$', - GenericJobView.as_view(), name='view-job'), - url(r'^(?P[\w,-]+)/jobs/(?P\d+)/restart/$', - GenericRestartJobView.as_view(), name='restart-job'), - url(r'^(?P[\w,-]+)/export$', - GenericExportConnectorView.as_view(), name='export-connector'), - ]))) + url( + r'^manage/(?P[\w,-]+)/', + decorated_includes( + manager_required, + include( + [ + url(r'^add$', GenericCreateConnectorView.as_view(), name='create-connector'), + url( + r'^(?P[\w,-]+)/delete$', + GenericDeleteConnectorView.as_view(), + name='delete-connector', + ), + url( + r'^(?P[\w,-]+)/edit$', GenericEditConnectorView.as_view(), name='edit-connector' + ), + url( + r'^(?P[\w,-]+)/logs/$', + GenericViewLogsConnectorView.as_view(), + name='view-logs-connector', + ), + url( + r'^(?P[\w,-]+)/logs/(?P\d+)/$', + GenericLogView.as_view(), + name='view-log', + ), + url( + r'^(?P[\w,-]+)/jobs/$', + GenericViewJobsConnectorView.as_view(), + name='view-jobs-connector', + ), + url( + r'^(?P[\w,-]+)/jobs/(?P\d+)/$', + GenericJobView.as_view(), + name='view-job', + ), + url( + r'^(?P[\w,-]+)/jobs/(?P\d+)/restart/$', + GenericRestartJobView.as_view(), + name='restart-job', + ), + url( + r'^(?P[\w,-]+)/export$', + GenericExportConnectorView.as_view(), + name='export-connector', + ), + ] + ), + ), + ) ] urlpatterns += [ - url(r'^(?P[\w,-]+)/(?P[\w,-]+)/$', - GenericConnectorView.as_view(), name='view-connector'), - 
url(r'^(?P[\w,-]+)/(?P[\w,-]+)/(?P[\w,-]+)(?:/(?P.*))?$', - GenericEndpointView.as_view(), name='generic-endpoint') + url( + r'^(?P[\w,-]+)/(?P[\w,-]+)/$', GenericConnectorView.as_view(), name='view-connector' + ), + url( + r'^(?P[\w,-]+)/(?P[\w,-]+)/(?P[\w,-]+)(?:/(?P.*))?$', + GenericEndpointView.as_view(), + name='generic-endpoint', + ), ] if settings.DEBUG and 'debug_toolbar' in settings.INSTALLED_APPS: import debug_toolbar + urlpatterns = [ url(r'^__debug__/', include(debug_toolbar.urls)), ] + urlpatterns diff --git a/passerelle/urls_utils.py b/passerelle/urls_utils.py index 09f5195e..d0436eee 100644 --- a/passerelle/urls_utils.py +++ b/passerelle/urls_utils.py @@ -5,6 +5,7 @@ from functools import wraps from django.conf import settings from django.contrib.auth.decorators import user_passes_test from django.core.exceptions import PermissionDenied + try: from django.urls import URLPattern, URLResolver except ImportError: @@ -13,6 +14,7 @@ except ImportError: from django.views.debug import technical_404_response from django.http import Http404 + class DecoratedURLPattern(URLPattern): def resolve(self, *args, **kwargs): result = super(DecoratedURLPattern, self).resolve(*args, **kwargs) @@ -20,6 +22,7 @@ class DecoratedURLPattern(URLPattern): result.func = self._decorate_with(result.func) return result + class DecoratedURLResolver(URLResolver): def resolve(self, *args, **kwargs): result = super(DecoratedURLResolver, self).resolve(*args, **kwargs) @@ -27,6 +30,7 @@ class DecoratedURLResolver(URLResolver): result.func = self._decorate_with(result.func) return result + def decorated_includes(func, includes, *args, **kwargs): urlconf_module, app_name, namespace = includes @@ -48,33 +52,44 @@ def decorated_includes(func, includes, *args, **kwargs): # [... urls for applabel ...] 
# ) + def unless(test, message): '''Decorator returning a 404 status code if some condition is not met''' + def decorator(func): @wraps(func) def f(request, *args, **kwargs): if not test(): return technical_404_response(request, Http404(message)) return func(request, *args, **kwargs) + return f + return decorator + def app_enabled(app_label): '''for enabling a view based on PASSERELLE_APP__ENABLED flag''' + def test(): return getattr(settings, 'PASSERELLE_APP_%s_ENABLED' % app_label.upper(), True) + return unless(test, 'please enable %s' % app_label) + def setting_enabled(name): '''for enabling a view based on a setting''' + def test(): return getattr(settings, name, False) + return unless(test, 'please enable %s' % name) + # code bellow is borrowed from https://djangosnippets.org/snippets/2607/ # or https://gist.github.com/sjzabel/1378003 -def required(wrapping_functions,patterns_rslt): - ''' +def required(wrapping_functions, patterns_rslt): + """ Used to require 1..n decorators in any view returned by a url tree Usage: @@ -93,14 +108,12 @@ def required(wrapping_functions,patterns_rslt): partial(login_required,login_url='/accounts/login/'), [...urls...] 
) - ''' + """ if not hasattr(wrapping_functions, '__iter__'): - wrapping_functions = (wrapping_functions, ) + wrapping_functions = (wrapping_functions,) + + return [_wrap_instance__resolve(wrapping_functions, instance) for instance in patterns_rslt] - return [ - _wrap_instance__resolve(wrapping_functions, instance) - for instance in patterns_rslt - ] def _wrap_instance__resolve(wrapping_functions, instance): def _wrap_func_in_returned_resolver_match(*args, **kwargs): @@ -113,12 +126,14 @@ def _wrap_instance__resolve(wrapping_functions, instance): f = _f(f) setattr(rslt, 'func', f) return rslt + if not hasattr(instance, 'resolve'): return instance resolve = getattr(instance, 'resolve') setattr(instance, 'resolve', _wrap_func_in_returned_resolver_match) return instance + def manager_required(function=None, login_url=None): def check_manager(user): if user and user.is_staff: @@ -127,6 +142,7 @@ def manager_required(function=None, login_url=None): raise PermissionDenied() # As the last resort, show the login form return False + actual_decorator = user_passes_test(check_manager, login_url=login_url) if function: return actual_decorator(function) diff --git a/passerelle/utils/__init__.py b/passerelle/utils/__init__.py index 92582e87..8c886223 100644 --- a/passerelle/utils/__init__.py +++ b/passerelle/utils/__init__.py @@ -107,9 +107,9 @@ def get_request_users(request): def get_trusted_services(): - ''' + """ All services in settings.KNOWN_SERVICES are "trusted" - ''' + """ trusted_services = [] for service_type in getattr(settings, 'KNOWN_SERVICES', {}): for slug, service in settings.KNOWN_SERVICES[service_type].items(): @@ -122,16 +122,18 @@ def get_trusted_services(): def is_trusted(request): - ''' + """ True if query-string is signed by a trusted service (see get_trusted_services() above) - ''' + """ if not request.GET.get('orig') or not request.GET.get('signature'): return False full_path = request.get_full_path() for service in get_trusted_services(): - if 
(service.get('verif_orig') == request.GET['orig'] - and service.get('secret') - and check_url(full_path, service['secret'])): + if ( + service.get('verif_orig') == request.GET['orig'] + and service.get('secret') + and check_url(full_path, service['secret']) + ): return True return False @@ -159,7 +161,9 @@ def protected_api(perm): if not is_authorized(request, obj, perm): raise PermissionDenied() return view_func(instance, request, *args, **kwargs) + return _wrapped_view + return decorator @@ -174,8 +178,7 @@ def content_type_match(ctype): def make_headers_safe(headers): - '''Convert dict of HTTP headers to text safely, as some services returns 8-bits encoding in headers. - ''' + """Convert dict of HTTP headers to text safely, as some services returns 8-bits encoding in headers.""" return { force_text(key, errors='replace'): force_text(value, errors='replace') for key, value in headers.items() @@ -235,6 +238,7 @@ def log_http_request(logger, request, response=None, exception=None, error_log=T # - disable CA verification if resource.verify_cert (BooleanField) exists and is set # - use a proxy for HTTP and HTTPS if resource.http_proxy exists + class Request(RequestSession): ADAPTER_REGISTRY = {} # connection pooling @@ -261,9 +265,9 @@ class Request(RequestSession): if keystore: kwargs['cert'] = keystore.path if 'verify' not in kwargs: - trusted_certificate_authorities = getattr(self.resource, - 'trusted_certificate_authorities', - None) + trusted_certificate_authorities = getattr( + self.resource, 'trusted_certificate_authorities', None + ) if trusted_certificate_authorities: kwargs['verify'] = trusted_certificate_authorities.path elif hasattr(self.resource, 'verify_cert'): @@ -296,11 +300,15 @@ class Request(RequestSession): response = super(Request, self).request(method, url, **kwargs) if method == 'GET' and cache_duration and (response.status_code // 100 == 2): - cache.set(cache_key, { - 'content': response.content, - 'headers': response.headers, - 'status_code': 
response.status_code, - }, cache_duration) + cache.set( + cache_key, + { + 'content': response.content, + 'headers': response.headers, + 'status_code': response.status_code, + }, + cache_duration, + ) return response @@ -315,8 +323,9 @@ class Request(RequestSession): def log_http_request(self, request, response=None, exception=None): error_log = getattr(self.resource, 'log_requests_errors', True) - log_http_request(self.logger, request=request, response=response, exception=exception, error_log=error_log) - + log_http_request( + self.logger, request=request, response=response, exception=exception, error_log=error_log + ) def export_site(slugs=None): @@ -341,9 +350,9 @@ def export_site(slugs=None): def import_site(d, if_empty=False, clean=False, overwrite=False, import_users=False): - '''Load passerelle configuration (users, resources and ACLs) from a dictionnary loaded from - JSON - ''' + """Load passerelle configuration (users, resources and ACLs) from a dictionnary loaded from + JSON + """ from passerelle.base.models import ApiUser from passerelle.base.models import BaseResource @@ -382,9 +391,9 @@ def import_site(d, if_empty=False, clean=False, overwrite=False, import_users=Fa def batch(iterable, size): - '''Batch an iterable as an iterable of iterables of at most size element - long. - ''' + """Batch an iterable as an iterable of iterables of at most size element + long. 
+ """ sourceiter = iter(iterable) while True: batchiter = islice(sourceiter, size) @@ -395,6 +404,7 @@ def batch(iterable, size): except StopIteration: return + # legacy import, other modules keep importing to_json from passerelle.utils from .jsonresponse import to_json from .soap import SOAPClient, SOAPTransport diff --git a/passerelle/utils/api.py b/passerelle/utils/api.py index ae6ec812..c36efc94 100644 --- a/passerelle/utils/api.py +++ b/passerelle/utils/api.py @@ -29,26 +29,33 @@ from .jsonresponse import APIError # noqa class endpoint(object): do_not_call_in_templates = True - def __init__(self, serializer_type='json-api', perm=None, methods=['get'], name=None, pattern=None, - wrap_response=False, - description=None, - description_get=None, - description_post=None, - description_patch=None, - long_description=None, - long_description_get=None, - long_description_post=None, - long_description_patch=None, - example_pattern=None, - parameters=None, - cache_duration=None, - post=None, - show=True, - show_undocumented_params=True, - display_order=0, - display_category='', - json_schema_response=None, - datasource=False): + def __init__( + self, + serializer_type='json-api', + perm=None, + methods=['get'], + name=None, + pattern=None, + wrap_response=False, + description=None, + description_get=None, + description_post=None, + description_patch=None, + long_description=None, + long_description_get=None, + long_description_post=None, + long_description_patch=None, + example_pattern=None, + parameters=None, + cache_duration=None, + post=None, + show=True, + show_undocumented_params=True, + display_order=0, + display_category='', + json_schema_response=None, + datasource=False, + ): self.perm = perm self.methods = methods self.serializer_type = serializer_type @@ -103,8 +110,13 @@ class endpoint(object): return self.object._category_ordering.index(self.display_category) def get_example_params(self): - return dict([(x, self.parameters[x]['example_value']) for x in 
self.parameters or {} - if x in self.parameters and 'example_value' in self.parameters[x]]) + return dict( + [ + (x, self.parameters[x]['example_value']) + for x in self.parameters or {} + if x in self.parameters and 'example_value' in self.parameters[x] + ] + ) def get_query_parameters(self): query_parameters = [] @@ -138,22 +150,25 @@ class endpoint(object): } if self.example_pattern: kwargs['rest'] = self.example_pattern.format( - **dict([(x, '$%s$' % x) for x in self.get_example_params().keys()])) + **dict([(x, '$%s$' % x) for x in self.get_example_params().keys()]) + ) url = reverse('generic-endpoint', kwargs=kwargs) for param in self.get_example_params(): - url = url.replace('$%s$' % param, '%s' % param) + url = url.replace('$%s$' % param, '%s' % param) query_string = '' query_parameters = self.get_query_parameters() if query_parameters: - query_string = '?' + '&'.join(['%s=%s' % (x[0], x[0]) for x in query_parameters]) + query_string = '?' + '&'.join( + ['%s=%s' % (x[0], x[0]) for x in query_parameters] + ) return mark_safe(url + query_string) def has_params(self): argspec = inspect.getargspec(self.func) - return len(argspec.args) > 2 # (self, request) + return len(argspec.args) > 2 # (self, request) @property def description(self): @@ -165,15 +180,16 @@ class endpoint(object): @property def body_schemas(self): - if (self.http_method == 'post' - and self.post - and 'request_body' in self.post - and 'schema' in self.post['request_body']): + if ( + self.http_method == 'post' + and self.post + and 'request_body' in self.post + and 'schema' in self.post['request_body'] + ): return self.post['request_body']['schema'] return {} def get_params(self): - def type_to_str(value): if isinstance(value, bool): return 'boolean' @@ -187,8 +203,9 @@ class endpoint(object): spec = inspect.getargspec(self.func) defaults = dict(zip(reversed(spec.args), reversed(spec.defaults or []))) if self.show_undocumented_params: - available_params = {arg: {} for arg in spec.args[2:] - if 
arg != 'post_data' and not arg in self.parameters} + available_params = { + arg: {} for arg in spec.args[2:] if arg != 'post_data' and not arg in self.parameters + } available_params.update(self.parameters) for param, info in available_params.items(): param_info = {'name': param} diff --git a/passerelle/utils/conversion.py b/passerelle/utils/conversion.py index f903ea89..df2b5cd7 100644 --- a/passerelle/utils/conversion.py +++ b/passerelle/utils/conversion.py @@ -101,10 +101,10 @@ def normalize(s): def simplify(s): - ''' + """ Simplify a string, trying to transform it to lower ascii chars (a-z, 0-9) and minimize spaces. Used to compare strings on ?q=something requests. - ''' + """ if not s: return '' s = force_text(s, 'utf-8', 'ignore') diff --git a/passerelle/utils/db.py b/passerelle/utils/db.py index a4cb9f99..9be7f94d 100644 --- a/passerelle/utils/db.py +++ b/passerelle/utils/db.py @@ -35,8 +35,9 @@ class EnsureJsonbType(Operation): _, column_name = field.get_attname_column() with schema_editor.connection.cursor() as cursor: cursor.execute( - 'ALTER TABLE {table} ALTER COLUMN {col} TYPE jsonb USING {col}::jsonb;' - .format(table=table_name, col=column_name) + 'ALTER TABLE {table} ALTER COLUMN {col} TYPE jsonb USING {col}::jsonb;'.format( + table=table_name, col=column_name + ) ) def database_backwards(self, app_label, schema_editor, from_state, to_state): diff --git a/passerelle/utils/files.py b/passerelle/utils/files.py index 89d5fef0..8c634fbe 100644 --- a/passerelle/utils/files.py +++ b/passerelle/utils/files.py @@ -23,14 +23,14 @@ from django.core.files.storage import default_storage @contextlib.contextmanager def atomic_write(filepath, **kwargs): - '''Return a file descriptor to a temporary file using NamedTemporaryFile - which will be atomically renamed to filepath if possible. + """Return a file descriptor to a temporary file using NamedTemporaryFile + which will be atomically renamed to filepath if possible. 
- Atomic renaming is only possible on the same filesystem, so the - temporary file will be created in the same directory as the target file + Atomic renaming is only possible on the same filesystem, so the + temporary file will be created in the same directory as the target file - You can pass any possible argument to NamedTemporaryFile with kwargs. - ''' + You can pass any possible argument to NamedTemporaryFile with kwargs. + """ tmp_dir = kwargs.pop('dir', None) if not tmp_dir: diff --git a/passerelle/utils/http_authenticators.py b/passerelle/utils/http_authenticators.py index aea9316d..0c5b6cfd 100644 --- a/passerelle/utils/http_authenticators.py +++ b/passerelle/utils/http_authenticators.py @@ -27,7 +27,6 @@ from requests.auth import AuthBase class HawkAuth(AuthBase): - def __init__(self, id, key, algorithm='sha256', ext=''): self.id = id.encode('utf-8') self.key = key.encode('utf-8') @@ -55,13 +54,28 @@ class HawkAuth(AuthBase): elif url_parts.scheme == 'https': port = '443' hash = self.get_payload_hash(req) - data = ['hawk.1.header', self.timestamp, self.nonce, req.method.upper(), uri, - url_parts.hostname, port, hash, self.ext, ''] + data = [ + 'hawk.1.header', + self.timestamp, + self.nonce, + req.method.upper(), + uri, + url_parts.hostname, + port, + hash, + self.ext, + '', + ] digestmod = getattr(hashlib, self.algorithm) result = hmac.new(force_bytes(self.key), force_bytes('\n'.join(data)), digestmod) mac = force_text(base64.b64encode(result.digest())) - authorization = 'Hawk id="%s", ts="%s", nonce="%s", hash="%s", mac="%s"'% (force_text(self.id), self.timestamp, self.nonce, - hash, mac) + authorization = 'Hawk id="%s", ts="%s", nonce="%s", hash="%s", mac="%s"' % ( + force_text(self.id), + self.timestamp, + self.nonce, + hash, + mac, + ) if self.ext: authorization += ', ext="%s"' % self.ext return authorization diff --git a/passerelle/utils/json.py b/passerelle/utils/json.py index 354b3444..9ecb8f90 100644 --- a/passerelle/utils/json.py +++ 
b/passerelle/utils/json.py @@ -40,14 +40,14 @@ FLATTEN_SEPARATOR = '/' def unflatten(d, separator=FLATTEN_SEPARATOR): - '''Transform: + """Transform: - {"a/b/0/x": "1234"} + {"a/b/0/x": "1234"} - into: + into: - {"a": {"b": [{"x": "1234"}]}} - ''' + {"a": {"b": [{"x": "1234"}]}} + """ if not isinstance(d, dict) or not d: # unflattening an empty dict has no sense return d @@ -55,13 +55,14 @@ def unflatten(d, separator=FLATTEN_SEPARATOR): def map_digits(l): return [int(x) if is_number(x) else x for x in l] + keys = [(map_digits(key.split(separator)), key) for key in d] keys.sort() def set_path(path, orig_key, d, value, i=0): assert path - key, tail = path[i], path[i + 1:] + key, tail = path[i], path[i + 1 :] if not tail: # end of path, set thevalue if isinstance(key, int): @@ -78,9 +79,10 @@ def unflatten(d, separator=FLATTEN_SEPARATOR): if isinstance(key, int): assert isinstance(d, list) if len(d) < key: - raise ValueError('incomplete array before %s in %s' % ( - separator.join(map(str, path[:i + 1])), - orig_key)) + raise ValueError( + 'incomplete array before %s in %s' + % (separator.join(map(str, path[: i + 1])), orig_key) + ) elif len(d) == key: d.append(new) else: @@ -114,6 +116,7 @@ def flatten(data, separator=FLATTEN_SEPARATOR): yield [str(key)] + path, value else: yield [], data + return {separator.join(path): value for path, value in helper(data)} @@ -151,8 +154,6 @@ def flatten_json_schema(schema, separator=FLATTEN_SEPARATOR): return { 'type': 'object', 'description': 'flattened schema *never* use for validation', - 'properties': { - key: schema for key, schema in helper('', schema) - }, + 'properties': {key: schema for key, schema in helper('', schema)}, 'additionalProperties': False, } diff --git a/passerelle/utils/jsonresponse.py b/passerelle/utils/jsonresponse.py index 221b053d..eb986c6a 100644 --- a/passerelle/utils/jsonresponse.py +++ b/passerelle/utils/jsonresponse.py @@ -141,23 +141,18 @@ class to_json(object): if hasattr(logger, 'connector'): 
max_size = logger.connector.logging_parameters.requests_max_size or max_size extras.update({'body': repr(req.body[:max_size])}) - if (not isinstance(e, (Http404, PermissionDenied, ObjectDoesNotExist, RequestException)) - and getattr(e, 'log_error', True)): + if not isinstance( + e, (Http404, PermissionDenied, ObjectDoesNotExist, RequestException) + ) and getattr(e, 'log_error', True): logger.exception("Error occurred while processing request", extra=extras) elif isinstance(e, ObjectDoesNotExist): logger.warning('object not found: %r', e, extra=extras) elif isinstance(e, PermissionDenied): logger.warning('Permission denied', extra=extras) elif isinstance(e, HTTPError): - log_http_request(logger, - request=e.request, - response=e.response, - extra=extras) + log_http_request(logger, request=e.request, response=e.response, extra=extras) elif isinstance(e, RequestException): - log_http_request(logger, - request=e.request, - exception=e, - extra=extras) + log_http_request(logger, request=e.request, exception=e, extra=extras) elif isinstance(e, Http404): # Http404 is for silent object not found exceptions pass diff --git a/passerelle/utils/paginator.py b/passerelle/utils/paginator.py index 9aa2e4c1..7062d24a 100644 --- a/passerelle/utils/paginator.py +++ b/passerelle/utils/paginator.py @@ -8,8 +8,8 @@ class InfinitePaginator(Paginator): number = self.validate_number(number) offset = (number - 1) * self.per_page - window_items = list(self.object_list[offset:offset + self.per_page + 1]) - page_items = window_items[:self.per_page] + window_items = list(self.object_list[offset : offset + self.per_page + 1]) + page_items = window_items[: self.per_page] if not page_items: if number == 1 and self.allow_empty_first_page: @@ -22,7 +22,7 @@ class InfinitePaginator(Paginator): @cached_property def count(self): - return 2**32 + return 2 ** 32 @cached_property def page_range(self): diff --git a/passerelle/utils/sftp.py b/passerelle/utils/sftp.py index a75e1fe6..82deb382 100644 --- 
a/passerelle/utils/sftp.py +++ b/passerelle/utils/sftp.py @@ -34,6 +34,7 @@ from django.utils.encoding import force_bytes import paramiko from paramiko.dsskey import DSSKey from paramiko.ecdsakey import ECDSAKey + try: from paramiko.ed25519key import Ed25519Key except ImportError: @@ -94,9 +95,12 @@ class SFTP(object): return re.sub(r'://([^/]*:[^/]*?)@', '://***:***@', self.url) def __eq__(self, other): - return (isinstance(other, SFTP) and other.url == self.url - and other.private_key_content == self.private_key_content - and other.private_key_password == self.private_key_password) + return ( + isinstance(other, SFTP) + and other.url == self.url + and other.private_key_content == self.private_key_content + and other.private_key_password == self.private_key_password + ) # Paramiko can hang processes if not closed, it's important to use it as a # contextmanager @@ -116,7 +120,8 @@ class SFTP(object): look_for_keys=False, allow_agent=False, username=self.username, - password=self.password) + password=self.password, + ) client = ssh.open_sftp() try: if self.path: @@ -129,6 +134,7 @@ class SFTP(object): if not os.path.normpath(path).startswith(base_cwd): raise ValueError('all paths must be under base path %s: %s' % (base_cwd, path)) return path + client._adjust_cwd = _adjust_cwd yield client finally: @@ -182,9 +188,7 @@ class SFTPFormField(forms.MultiValueField): forms.CharField(required=False), forms.CharField(required=False), ] - super(SFTPFormField, self).__init__( - fields=fields, - require_all_fields=False, **kwargs) + super(SFTPFormField, self).__init__(fields=fields, require_all_fields=False, **kwargs) def compress(self, data_list): if not data_list: @@ -200,9 +204,8 @@ class SFTPFormField(forms.MultiValueField): if not pkey: raise forms.ValidationError(_('SSH private key invalid')) return SFTP( - url=url, - private_key_content=private_key_content, - private_key_password=private_key_password) + url=url, private_key_content=private_key_content, 
private_key_password=private_key_password + ) class SFTPField(models.Field): @@ -236,4 +239,3 @@ class SFTPField(models.Field): } defaults.update(**kwargs) return super(SFTPField, self).formfield(**defaults) - diff --git a/passerelle/utils/soap.py b/passerelle/utils/soap.py index 6c41c07c..a228c901 100644 --- a/passerelle/utils/soap.py +++ b/passerelle/utils/soap.py @@ -34,13 +34,14 @@ class SOAPClient(Client): resource muste have a wsdl_url and a requests attribute """ + def __init__(self, resource, **kwargs): wsdl_url = kwargs.pop('wsdl_url', None) or resource.wsdl_url transport_kwargs = kwargs.pop('transport_kwargs', {}) transport_class = getattr(resource, 'soap_transport_class', SOAPTransport) - transport = transport_class(resource, wsdl_url, - session=resource.requests, - cache=InMemoryCache(), **transport_kwargs) + transport = transport_class( + resource, wsdl_url, session=resource.requests, cache=InMemoryCache(), **transport_kwargs + ) super(SOAPClient, self).__init__(wsdl_url, transport=transport, **kwargs) @@ -58,7 +59,7 @@ class ResponseFixContentWrapper: try: first_less_than_sign = content.index(b'<') last_greater_than_sign = content.rindex(b'>') - content = content[first_less_than_sign:last_greater_than_sign + 1] + content = content[first_less_than_sign : last_greater_than_sign + 1] except ValueError: pass return content @@ -69,6 +70,7 @@ class SOAPTransport(Transport): disable basic_authentication hosts unrelated to wsdl's endpoints """ + def __init__(self, resource, wsdl_url, remove_first_bytes_for_xml=False, **kwargs): self.resource = resource self.wsdl_host = urlparse.urlparse(wsdl_url).netloc @@ -84,7 +86,9 @@ class SOAPTransport(Transport): return response.content return super(SOAPTransport, self)._load_remote_data(url) except RequestException as e: - raise SOAPError('SOAP service is down, location %r cannot be loaded: %s' % (url, e), exception=e, url=url) + raise SOAPError( + 'SOAP service is down, location %r cannot be loaded: %s' % (url, e), 
exception=e, url=url + ) def post_xml(self, *args, **kwargs): response = super().post_xml(*args, **kwargs) diff --git a/passerelle/utils/spooler.py b/passerelle/utils/spooler.py index fb2596c3..ee3cd99e 100644 --- a/passerelle/utils/spooler.py +++ b/passerelle/utils/spooler.py @@ -31,10 +31,7 @@ def run_job(args): # multitenant installation cmd_args.append('tenant_command') - cmd_args += [ - 'runjob', - '--job-id', args['job_id'] - ] + cmd_args += ['runjob', '--job-id', args['job_id']] if args.get('domain'): # multitenant installation diff --git a/passerelle/utils/templates.py b/passerelle/utils/templates.py index 81c509f5..fe44c109 100644 --- a/passerelle/utils/templates.py +++ b/passerelle/utils/templates.py @@ -27,14 +27,14 @@ from django.utils.translation import ugettext as _ def make_template(template_string): - engine = DjangoTemplates({ - 'NAME': 'django', - 'DIRS': [], - 'APP_DIRS': False, - 'OPTIONS': { - 'autoescape': False - }, - }) + engine = DjangoTemplates( + { + 'NAME': 'django', + 'DIRS': [], + 'APP_DIRS': False, + 'OPTIONS': {'autoescape': False}, + } + ) return engine.from_string(template_string) @@ -47,4 +47,3 @@ def validate_template(template_string): make_template(template_string) except TemplateSyntaxError as e: raise ValidationError(_('Invalid template: %s') % e) - diff --git a/passerelle/utils/validation.py b/passerelle/utils/validation.py index f5f7838e..6988c3a3 100644 --- a/passerelle/utils/validation.py +++ b/passerelle/utils/validation.py @@ -20,4 +20,3 @@ def is_number(string): return string.isdecimal() and [ord(c) < 256 for c in string] else: # str PY2 return string.isdigit() - diff --git a/passerelle/utils/wcs.py b/passerelle/utils/wcs.py index e6672338..203a14fb 100644 --- a/passerelle/utils/wcs.py +++ b/passerelle/utils/wcs.py @@ -167,8 +167,8 @@ class FormData(BaseObject): @property def endpoint_delay(self): - '''Compute delay as the time when the last not endpoint status precedes an endpoint - status.''' + """Compute delay as the 
time when the last not endpoint status precedes an endpoint + status.""" statuses_map = self.formdef.schema.workflow.statuses_map s = 0 for evo in self.evolution[::-1]: @@ -240,6 +240,7 @@ class FormDatas(object): def __getitem__(self, slice_or_id): # get batch of forms if isinstance(slice_or_id, slice): + def helper(): if slice_or_id.stop <= slice_or_id.start or slice_or_id.step: raise ValueError('invalid slice %s' % slice_or_id) @@ -261,6 +262,7 @@ class FormDatas(object): if not d.get('receipt_time'): continue yield FormData(wcs_api=self.wcs_api, forms=self, formdef=self.formdef, **d) + return helper() # or get one form else: @@ -291,7 +293,7 @@ class FormDatas(object): start = 0 while True: empty = True - for formdef in self[start:start + self.batch]: + for formdef in self[start : start + self.batch]: empty = False yield formdef if empty: @@ -355,10 +357,7 @@ class FormDefSubmit(object): if value is None or value == {} or value == []: self.data.pop(varname, None) elif hasattr(self, '_set_type_%s' % field.type): - getattr(self, '_set_type_%s' % field.type)( - varname=varname, - field=field, - value=value, **kwargs) + getattr(self, '_set_type_%s' % field.type)(varname=varname, field=field, value=value, **kwargs) else: self.data[varname] = value @@ -480,10 +479,7 @@ class FormDef(BaseObject): @contextlib.contextmanager def submit(self, **kwargs): - submitter = FormDefSubmit( - wcs_api=self._wcs_api, - formdef=self, - **kwargs) + submitter = FormDefSubmit(wcs_api=self._wcs_api, formdef=self, **kwargs) try: yield submitter except CancelSubmitError: @@ -543,7 +539,9 @@ class Categories(WcsObjects): class WcsApi(object): - def __init__(self, url, email=None, name_id=None, batch_size=1000, session=None, logger=None, orig=None, key=None): + def __init__( + self, url, email=None, name_id=None, batch_size=1000, session=None, logger=None, orig=None, key=None + ): self.url = url self.batch_size = batch_size self.email = email @@ -600,7 +598,9 @@ class WcsApi(object): if 
self.key: final_url = signature.sign_url(final_url, self.key) try: - response = self.requests.post(final_url, data=json.dumps(data), headers={'content-type': 'application/json'}) + response = self.requests.post( + final_url, data=json.dumps(data), headers={'content-type': 'application/json'} + ) response.raise_for_status() except requests.RequestException as e: content = getattr(getattr(e, 'response', None), 'content', None) @@ -631,16 +631,11 @@ def get_wcs_choices(session=None): def helper(): for key, value in known_services.get('wcs', {}).items(): - api = WcsApi( - url=value['url'], - orig=value['orig'], - key=value['secret'], - session=session) + api = WcsApi(url=value['url'], orig=value['orig'], key=value['secret'], session=session) for formdef in list(api.formdefs): - title = '%s - %s' % ( - value['title'], - formdef.title) + title = '%s - %s' % (value['title'], formdef.title) yield key, formdef.slug, title + cached_choices = sorted(helper(), key=lambda x: x[2]) cache.set('wcs-formdef-choices', cached_choices, 600) @@ -667,10 +662,8 @@ class FormDefRef(object): if not self._api: config = settings.KNOWN_SERVICES['wcs'].get(self.wcs_slug) self._api = WcsApi( - url=config['url'], - orig=config['orig'], - key=config['secret'], - session=self.session) + url=config['url'], orig=config['orig'], key=config['secret'], session=self.session + ) return self._api @property @@ -701,9 +694,7 @@ class FormDefRef(object): class FormDefFormField(forms.TypedChoiceField): def __init__(self, **kwargs): - super(FormDefFormField, self).__init__( - choices=self.get_formdef_choices, - coerce=FormDefRef, **kwargs) + super(FormDefFormField, self).__init__(choices=self.get_formdef_choices, coerce=FormDefRef, **kwargs) def get_formdef_choices(self): requests = getattr(self, 'requests', None) diff --git a/passerelle/utils/xml.py b/passerelle/utils/xml.py index c09124f7..6a6618b6 100644 --- a/passerelle/utils/xml.py +++ b/passerelle/utils/xml.py @@ -22,8 +22,8 @@ import xmlschema def 
text_content(node): - '''Extract text content from node and all its children. Equivalent to - xmlNodeGetContent from libxml.''' + """Extract text content from node and all its children. Equivalent to + xmlNodeGetContent from libxml.""" if node is None: return '' @@ -37,42 +37,43 @@ def text_content(node): if child.tail: s.append(child.tail) return s + return u''.join(helper(node)) def to_json(root): - '''Convert an XML document (a rooted tree) into dictionnary compatible with - JSON serialization following those rules: - - root is converted into a dictionnary, its children's node name are the - keys, - - all child nodes without child are considered to be only text and - converted to a JSON string, - - all child nodes with children are converted to an array with they - children as root of a new conversion from XML to JSON. + """Convert an XML document (a rooted tree) into dictionnary compatible with + JSON serialization following those rules: + - root is converted into a dictionnary, its children's node name are the + keys, + - all child nodes without child are considered to be only text and + converted to a JSON string, + - all child nodes with children are converted to an array with they + children as root of a new conversion from XML to JSON. 
- Ex.: + Ex.: - - wtv - - - 2 - - - 3 - - - + + wtv + + + 2 + + + 3 + + + - is converted to: + is converted to: - { - "child1": "wtv", - "rows": [ - {"child2": "2"}, - {"child3": "3"} - ] - }''' + { + "child1": "wtv", + "rows": [ + {"child2": "2"}, + {"child3": "3"} + ] + }""" d = {} for child in root: @@ -138,8 +139,7 @@ class JSONSchemaFromXMLSchema(object): self.json_schema = { 'type': 'object', 'properties': { - root_element: self.element_to_jsonschema( - xml_schema.elements[root_element]), + root_element: self.element_to_jsonschema(xml_schema.elements[root_element]), }, 'required': [root_element], 'additionalProperties': False, @@ -155,10 +155,12 @@ class JSONSchemaFromXMLSchema(object): return getattr(cls, 'schema_%s' % mapped)() if isinstance(simple_type, xmlschema.validators.XsdAtomicBuiltin): - if (simple_type.min_length - or simple_type.max_length - or simple_type.white_space not in ('collapse', 'preserve') - or simple_type.patterns): + if ( + simple_type.min_length + or simple_type.max_length + or simple_type.white_space not in ('collapse', 'preserve') + or simple_type.patterns + ): raise NotImplementedError(simple_type) if simple_type.name in cls.SIMPLE_TYPE_MAPPING: @@ -168,32 +170,42 @@ class JSONSchemaFromXMLSchema(object): return schema if isinstance(simple_type, xmlschema.validators.XsdAtomicRestriction): - if (simple_type.white_space not in ('collapse', 'preserve')): + if simple_type.white_space not in ('collapse', 'preserve'): raise NotImplementedError(simple_type) schema = OrderedDict(cls.simpletype_to_jsonschema(simple_type.base_type)) for validator in simple_type.validators: if isinstance(validator, xmlschema.validators.XsdEnumerationFacets): schema['enum'] = validator.enumeration - elif (isinstance(validator, xmlschema.validators.XsdMinLengthFacet) - and simple_type.base_type.name == xmlschema.qnames.XSD_STRING): + elif ( + isinstance(validator, xmlschema.validators.XsdMinLengthFacet) + and simple_type.base_type.name == 
xmlschema.qnames.XSD_STRING + ): schema['minLength'] = validator.value - elif (isinstance(validator, xmlschema.validators.XsdMaxLengthFacet) - and simple_type.base_type.name == xmlschema.qnames.XSD_STRING): + elif ( + isinstance(validator, xmlschema.validators.XsdMaxLengthFacet) + and simple_type.base_type.name == xmlschema.qnames.XSD_STRING + ): schema['maxLength'] = validator.value - elif (isinstance(validator, xmlschema.validators.XsdLengthFacet) - and simple_type.base_type.name == xmlschema.qnames.XSD_STRING): + elif ( + isinstance(validator, xmlschema.validators.XsdLengthFacet) + and simple_type.base_type.name == xmlschema.qnames.XSD_STRING + ): schema['minLength'] = validator.value schema['maxLength'] = validator.value elif isinstance(validator, xmlschema.validators.XsdMinInclusiveFacet): schema['minimum'] = validator.value elif isinstance(validator, xmlschema.validators.XsdMaxInclusiveFacet): schema['maximum'] = validator.value - elif (isinstance(validator, xmlschema.validators.XsdTotalDigitsFacet) - and simple_type.base_type.name == xmlschema.qnames.XSD_DECIMAL): - schema['exclusiveMaximum'] = 10**validator.value - elif (isinstance(validator, xmlschema.validators.XsdFractionDigitsFacet) - and simple_type.base_type.name == xmlschema.qnames.XSD_DECIMAL): - schema['multipleOf'] = 1 / 10.**validator.value + elif ( + isinstance(validator, xmlschema.validators.XsdTotalDigitsFacet) + and simple_type.base_type.name == xmlschema.qnames.XSD_DECIMAL + ): + schema['exclusiveMaximum'] = 10 ** validator.value + elif ( + isinstance(validator, xmlschema.validators.XsdFractionDigitsFacet) + and simple_type.base_type.name == xmlschema.qnames.XSD_DECIMAL + ): + schema['multipleOf'] = 1 / 10.0 ** validator.value else: raise NotImplementedError(validator) if simple_type.patterns: @@ -274,8 +286,7 @@ class JSONSchemaFromXMLSchema(object): for component in alternative: properties = schema.setdefault('properties', OrderedDict()) properties[component.name] = 
cls.element_to_jsonschema(component) - if (component.min_occurs > 0 - and component.name not in schema.get('required', [])): + if component.min_occurs > 0 and component.name not in schema.get('required', []): schema.setdefault('required', []).append(component.name) if len(alternatives) == 1: @@ -345,12 +356,10 @@ class JSONSchemaFromXMLSchema(object): def encode(self, instance): return self.xml_schema.elements[self.root_element].encode( - instance[self.root_element], - converter=TransformConverter, - transformer=self) + instance[self.root_element], converter=TransformConverter, transformer=self + ) def decode(self, source): return self.xml_schema.elements[self.root_element].decode( - source, - converter=TransformConverter, - transformer=self) + source, converter=TransformConverter, transformer=self + ) diff --git a/passerelle/utils/zip.py b/passerelle/utils/zip.py index 4a322363..b05b1ee6 100644 --- a/passerelle/utils/zip.py +++ b/passerelle/utils/zip.py @@ -71,7 +71,7 @@ SCHEMA = { 'type': 'string', }, }, - } + }, ] }, }, @@ -90,6 +90,7 @@ class ZipTemplateDoesNotExist(ZipTemplateError): class ZipTemplateSyntaxError(ZipTemplateError): pass + VARIABLE_RE = re.compile(r'{{ *(\w*)') @@ -100,7 +101,9 @@ class ZipPart(object): self._name_template = name_template self.template_path = template_path self.content_expression = content_expression - assert bool(self.template_path) ^ bool(self.content_expression), '\ + assert bool(self.template_path) ^ bool( + self.content_expression + ), '\ template_path and content_expression are mutually excluded' @property @@ -252,8 +255,7 @@ def diff_zip(one, two): if content_one == content_two: return if one.endswith(('.xml', '.json', '.txt')): - diff = list(difflib.ndiff(content_one.splitlines(), - content_two.splitlines())) + diff = list(difflib.ndiff(content_one.splitlines(), content_two.splitlines())) return ['File %s differs' % one] + diff return 'File %s differs' % one diff --git a/passerelle/views.py b/passerelle/views.py index 
ae69e790..5c0613ff 100644 --- a/passerelle/views.py +++ b/passerelle/views.py @@ -32,8 +32,15 @@ from django.db.models import Q from django.http import HttpResponse, HttpResponseRedirect, Http404 from django.views.decorators.csrf import csrf_exempt from django.views.generic import ( - RedirectView, View, TemplateView, CreateView, DeleteView, UpdateView, - DetailView, ListView) + RedirectView, + View, + TemplateView, + CreateView, + DeleteView, + UpdateView, + DetailView, + ListView, +) from django.views.generic.detail import SingleObjectMixin from django.conf import settings from django.shortcuts import resolve_url @@ -59,6 +66,7 @@ from .forms import ResourceLogSearchForm if 'mellon' in settings.INSTALLED_APPS: from mellon.utils import get_idps else: + def get_idps(): return [] @@ -72,10 +80,12 @@ class LoginView(auth_views.LoginView): if any(get_idps()): if 'next' not in request.GET: return HttpResponseRedirect(resolve_url('mellon_login')) - return HttpResponseRedirect(resolve_url('mellon_login') + '?next=' - + quote(request.GET.get('next'))) + return HttpResponseRedirect( + resolve_url('mellon_login') + '?next=' + quote(request.GET.get('next')) + ) return super(LoginView, self).dispatch(request, *args, **kwargs) + login = LoginView.as_view() @@ -92,13 +102,15 @@ def logout(request, next_page=None): def menu_json(request): label = _('Web Services') - json_str = json.dumps([ - { - 'label': force_text(label), - 'slug': 'passerelle', - 'url': request.build_absolute_uri(reverse('manage-home')) - } - ]) + json_str = json.dumps( + [ + { + 'label': force_text(label), + 'slug': 'passerelle', + 'url': request.build_absolute_uri(reverse('manage-home')), + } + ] + ) content_type = 'application/json' for variable in ('jsonpCallback', 'callback'): if variable in request.GET: @@ -161,15 +173,13 @@ class GenericConnectorMixin(object): def dispatch(self, request, *args, **kwargs): self.init_stuff(request, *args, **kwargs) - return super(GenericConnectorMixin, self).dispatch( - 
request, *args, **kwargs) + return super(GenericConnectorMixin, self).dispatch(request, *args, **kwargs) class GenericConnectorView(GenericConnectorMixin, DetailView): def get_context_data(self, slug=None, **kwargs): context = super(GenericConnectorView, self).get_context_data(**kwargs) - context['has_check_status'] = not hasattr( - context['object'].check_status, 'not_implemented') + context['has_check_status'] = not hasattr(context['object'].check_status, 'not_implemented') return context def get_template_names(self): @@ -225,9 +235,7 @@ class GenericViewLogsConnectorView(GenericConnectorMixin, ListView): if self.request.GET.get('log_id'): try: context['log_target'] = ResourceLog.objects.get( - appname=self.kwargs['connector'], - slug=self.kwargs['slug'], - pk=self.request.GET['log_id'] + appname=self.kwargs['connector'], slug=self.kwargs['slug'], pk=self.request.GET['log_id'] ) except (ValueError, ResourceLog.DoesNotExist): pass @@ -238,9 +246,9 @@ class GenericViewLogsConnectorView(GenericConnectorMixin, ListView): def get_queryset(self): self.form = ResourceLogSearchForm(data=self.request.GET) - qs = ResourceLog.objects.filter( - appname=self.kwargs['connector'], - slug=self.kwargs['slug']).order_by('-timestamp') + qs = ResourceLog.objects.filter(appname=self.kwargs['connector'], slug=self.kwargs['slug']).order_by( + '-timestamp' + ) query = None level = None if self.form.is_valid(): @@ -265,16 +273,13 @@ class GenericViewLogsConnectorView(GenericConnectorMixin, ListView): date = make_aware(date) if date.hour == 0 and date.minute == 0 and date.second == 0: # just a date: display all events for that date - qs = qs.filter(timestamp__gte=date, - timestamp__lte=date + datetime.timedelta(days=1)) + qs = qs.filter(timestamp__gte=date, timestamp__lte=date + datetime.timedelta(days=1)) elif date.second == 0: # without seconds: display all events in this minute - qs = qs.filter(timestamp__gte=date, - timestamp__lte=date + datetime.timedelta(seconds=60)) + qs = 
qs.filter(timestamp__gte=date, timestamp__lte=date + datetime.timedelta(seconds=60)) else: # display all events in the same second - qs = qs.filter(timestamp__gte=date, - timestamp__lte=date + datetime.timedelta(seconds=1)) + qs = qs.filter(timestamp__gte=date, timestamp__lte=date + datetime.timedelta(seconds=1)) return qs @@ -285,9 +290,8 @@ class GenericLogView(GenericConnectorMixin, DetailView): context = super(GenericLogView, self).get_context_data(**kwargs) try: context['logline'] = ResourceLog.objects.get( - pk=self.kwargs['log_pk'], - appname=self.kwargs['connector'], - slug=self.kwargs['slug']) + pk=self.kwargs['log_pk'], appname=self.kwargs['connector'], slug=self.kwargs['slug'] + ) except ResourceLog.DoesNotExist: raise Http404() return context @@ -320,8 +324,22 @@ class InvalidParameterValue(Exception): def __str__(self): return 'invalid value for parameter "%s"' % self.parameter_name -IGNORED_PARAMS = ('apikey', 'signature', 'nonce', 'algo', 'timestamp', 'orig', 'jsonpCallback', - 'callback', '_', 'raise', 'debug', 'decode', 'format') + +IGNORED_PARAMS = ( + 'apikey', + 'signature', + 'nonce', + 'algo', + 'timestamp', + 'orig', + 'jsonpCallback', + 'callback', + '_', + 'raise', + 'debug', + 'decode', + 'format', +) class GenericEndpointView(GenericConnectorMixin, SingleObjectMixin, View): @@ -460,22 +478,24 @@ class GenericEndpointView(GenericConnectorMixin, SingleObjectMixin, View): # auto log request's inputs connector_name, endpoint_name = kwargs['connector'], kwargs['endpoint'] url = request.get_full_path() - payload = request.body[:self.connector.logging_parameters.requests_max_size - or settings.LOGGED_REQUESTS_MAX_SIZE] + payload = request.body[ + : self.connector.logging_parameters.requests_max_size or settings.LOGGED_REQUESTS_MAX_SIZE + ] try: payload = payload.decode('utf-8') except UnicodeDecodeError: payload = '' - self.connector.logger.info('endpoint %s %s (%r) ' % - (request.method, url, payload), - extra={ - 'request': request, - 
'connector': connector_name, - 'connector_endpoint': endpoint_name, - 'connector_endpoint_method': request.method, - 'connector_endpoint_url': url, - 'connector_payload': payload - }) + self.connector.logger.info( + 'endpoint %s %s (%r) ' % (request.method, url, payload), + extra={ + 'request': request, + 'connector': connector_name, + 'connector_endpoint': endpoint_name, + 'connector_endpoint_method': request.method, + 'connector_endpoint_url': url, + 'connector_payload': payload, + }, + ) params = self.get_params(request, *args, **kwargs) if request.method == 'GET' and self.endpoint.endpoint_info.cache_duration: @@ -542,14 +562,11 @@ class GenericEndpointView(GenericConnectorMixin, SingleObjectMixin, View): class GenericExportConnectorView(GenericConnectorMixin, DetailView): - def get(self, request, *args, **kwargs): response = HttpResponse(content_type='application/json') today = datetime.date.today() response['Content-Disposition'] = 'attachment; filename="export_{}_{}_{}.json"'.format( - self.get_object().get_connector_slug(), - self.get_object().slug, - today.strftime('%Y%m%d') + self.get_object().get_connector_slug(), self.get_object().slug, today.strftime('%Y%m%d') ) json.dump({'resources': [self.get_object().export_json()]}, response, indent=2) return response diff --git a/passerelle/wsgi.py b/passerelle/wsgi.py index 04593d1c..895fbfcb 100644 --- a/passerelle/wsgi.py +++ b/passerelle/wsgi.py @@ -21,6 +21,7 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passerelle.settings") # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application + application = get_wsgi_application() # Apply WSGI middleware here. 
diff --git a/setup.py b/setup.py index e1707aa8..d7a20f73 100755 --- a/setup.py +++ b/setup.py @@ -25,9 +25,9 @@ class eo_sdist(sdist): def get_version(): - '''Use the VERSION, if absent generates a version with git describe, if not - tag exists, take 0.0- and add the length of the commit log. - ''' + """Use the VERSION, if absent generates a version with git describe, if not + tag exists, take 0.0- and add the length of the commit log. + """ if os.path.exists('VERSION'): with open('VERSION', 'r') as v: return v.read() @@ -65,6 +65,7 @@ class compile_translations(Command): curdir = os.getcwd() try: from django.core.management import call_command + for path, dirs, files in os.walk('passerelle'): if 'locale' not in dirs: continue @@ -86,49 +87,50 @@ class install_lib(_install_lib): _install_lib.run(self) -setup(name='passerelle', - version=get_version(), - license='AGPLv3', - description='Passerelle provides an uniform access to multiple data sources and services.', - url='https://dev.entrouvert.org/projects/passerelle/', - download_url='http://repos.entrouvert.org/passerelle.git/', - author="Entr'ouvert", - author_email="info@entrouvert.com", - packages=find_packages(os.path.dirname(__file__) or '.'), - scripts=['manage.py'], - include_package_data=True, - install_requires=[ - 'django >= 1.11, <2.3', - 'django-model-utils<4', - 'requests', - 'gadjo', - 'phpserialize', - 'suds-jurko', - 'pyexcel-io', - 'pyexcel-ods', - 'pyexcel-xls', - 'cmislib-maykin', - 'pyproj', - 'feedparser<6' if sys.version_info < (3, 9) else 'feedparser>=6', - 'lxml', - 'python-dateutil', - 'Pillow', - 'jsonschema < 3.1', - 'zeep >= 3.2', - 'pycrypto', - 'pycryptodomex', - 'unidecode', - 'paramiko', - 'pdfrw', - 'httplib2', - 'xmlschema<1.1', - 'pytz', - ], - cmdclass={ - 'build': build, - 'compile_translations': compile_translations, - 'install_lib': install_lib, - 'sdist': eo_sdist, - }, - package_data={'passerelle': ['*.xsd']} +setup( + name='passerelle', + version=get_version(), + 
license='AGPLv3', + description='Passerelle provides an uniform access to multiple data sources and services.', + url='https://dev.entrouvert.org/projects/passerelle/', + download_url='http://repos.entrouvert.org/passerelle.git/', + author="Entr'ouvert", + author_email="info@entrouvert.com", + packages=find_packages(os.path.dirname(__file__) or '.'), + scripts=['manage.py'], + include_package_data=True, + install_requires=[ + 'django >= 1.11, <2.3', + 'django-model-utils<4', + 'requests', + 'gadjo', + 'phpserialize', + 'suds-jurko', + 'pyexcel-io', + 'pyexcel-ods', + 'pyexcel-xls', + 'cmislib-maykin', + 'pyproj', + 'feedparser<6' if sys.version_info < (3, 9) else 'feedparser>=6', + 'lxml', + 'python-dateutil', + 'Pillow', + 'jsonschema < 3.1', + 'zeep >= 3.2', + 'pycrypto', + 'pycryptodomex', + 'unidecode', + 'paramiko', + 'pdfrw', + 'httplib2', + 'xmlschema<1.1', + 'pytz', + ], + cmdclass={ + 'build': build, + 'compile_translations': compile_translations, + 'install_lib': install_lib, + 'sdist': eo_sdist, + }, + package_data={'passerelle': ['*.xsd']}, ) diff --git a/tests/conftest.py b/tests/conftest.py index 4f855192..5b318956 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -32,70 +32,68 @@ def app(request): @urlmatch(netloc='^api-adresse.data.gouv.fr$', path='^/search/$') @remember_called def api_adresse_data_gouv_fr_search(url, request): - return response(200, { - "limit": 1, - "attribution": "BAN", - "version": "draft", - "licence": "ODbL 1.0", - "query": "plop", - "type": "FeatureCollection", - "features": [ - { - "geometry": { - "type": "Point", - "coordinates": [ - -0.593775, - 47.474633 - ] - }, - "properties": { - "citycode": "49007", - "name": "Rue Roger Halope", - "id": "49007_6950_be54bd", - "city": "Angers", - "context": "49, Maine-et-Loire, Pays de la Loire", - "score": 0.14097272727272728, - "label": "Rue Roger Halope 49000 Angers", - "postcode": "49000", - "type": "street" - }, - "type": "Feature" - } - ]}, request=request) + return 
response( + 200, + { + "limit": 1, + "attribution": "BAN", + "version": "draft", + "licence": "ODbL 1.0", + "query": "plop", + "type": "FeatureCollection", + "features": [ + { + "geometry": {"type": "Point", "coordinates": [-0.593775, 47.474633]}, + "properties": { + "citycode": "49007", + "name": "Rue Roger Halope", + "id": "49007_6950_be54bd", + "city": "Angers", + "context": "49, Maine-et-Loire, Pays de la Loire", + "score": 0.14097272727272728, + "label": "Rue Roger Halope 49000 Angers", + "postcode": "49000", + "type": "street", + }, + "type": "Feature", + } + ], + }, + request=request, + ) @urlmatch(netloc='^api-adresse.data.gouv.fr$', path='^/reverse/$') def api_adresse_data_gouv_fr_reverse(url, request): - return response(200, { - "limit": 1, - "attribution": "BAN", - "version": "draft", - "licence": "ODbL 1.0", - "type": "FeatureCollection", - "features": [ - { - "geometry": { - "type": "Point", - "coordinates": [ - -0.593775, - 47.474633 - ] - }, - "properties": { - "citycode": "49007", - "name": "Rue Roger Halope", - "id": "49007_6950_be54bd", - "city": "Angers", - "distance": 0, - "context": "49, Maine-et-Loire, Pays de la Loire", - "score": 1.0, - "label": "Rue Roger Halope 49000 Angers", - "postcode": "49000", - "type": "street" - }, - "type": "Feature" - } - ]}, request=request) + return response( + 200, + { + "limit": 1, + "attribution": "BAN", + "version": "draft", + "licence": "ODbL 1.0", + "type": "FeatureCollection", + "features": [ + { + "geometry": {"type": "Point", "coordinates": [-0.593775, 47.474633]}, + "properties": { + "citycode": "49007", + "name": "Rue Roger Halope", + "id": "49007_6950_be54bd", + "city": "Angers", + "distance": 0, + "context": "49, Maine-et-Loire, Pays de la Loire", + "score": 1.0, + "label": "Rue Roger Halope 49000 Angers", + "postcode": "49000", + "type": "street", + }, + "type": "Feature", + } + ], + }, + request=request, + ) @pytest.yield_fixture @@ -114,8 +112,8 @@ def mock_api_adresse_data_gouv_fr_reverse(): def 
endpoint_dummy_cache(monkeypatch): from django.core.cache import caches import passerelle.views - monkeypatch.setattr( - passerelle.views, 'cache', caches['dummy']) + + monkeypatch.setattr(passerelle.views, 'cache', caches['dummy']) @urlmatch() @@ -142,7 +140,8 @@ def dummy_csv_datasource(db): slug='dummy-slug', title='Dummy Title', description='dummy description', - csv_file=File(BytesIO(data), 'dummy.csv')) + csv_file=File(BytesIO(data), 'dummy.csv'), + ) Query.objects.create( resource=obj, @@ -150,20 +149,22 @@ def dummy_csv_datasource(db): structure='array', label='Dummy Query', description='dummy query description', - projections='id:int(id)\ntext:label') + projections='id:int(id)\ntext:label', + ) return obj @pytest.fixture def relax_openssl(tmpdir): - '''OpenSSL default configuration has been really strict for some years, - this fixture set a temporary really permisive ciphers list.''' + """OpenSSL default configuration has been really strict for some years, + this fixture set a temporary really permisive ciphers list.""" import os openssl_cnf_path = tmpdir / 'openssl.cnf' with openssl_cnf_path.open('w') as fd: - fd.write(u''' + fd.write( + u''' [default_conf] ssl_conf = ssl_sect @@ -171,7 +172,8 @@ ssl_conf = ssl_sect system_default = system_default_sect [system_default_sect] -CipherString = ALL''') +CipherString = ALL''' + ) old_value = os.environ.get('OPENSSL_CONF', None) try: os.environ['OPENSSL_CONF'] = str(openssl_cnf_path) diff --git a/tests/settings.py b/tests/settings.py index 3441868b..ba5e935f 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -54,9 +54,7 @@ CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, - 'dummy': { - 'BACKEND': 'django.core.cache.backends.dummy.DummyCache' - } + 'dummy': {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}, } DATABASES = { diff --git a/tests/test_actesweb.py b/tests/test_actesweb.py index 77c88329..190db168 100644 --- a/tests/test_actesweb.py +++ 
b/tests/test_actesweb.py @@ -46,15 +46,9 @@ def actesweb(db): PAYLOAD = [ - { - 'birth': json.loads(get_file_from_test_base_dir('payload_birth.json')) - }, - { - 'mariage': json.loads(get_file_from_test_base_dir('payload_mariage.json')) - }, - { - 'death': json.loads(get_file_from_test_base_dir('payload_death.json')) - } + {'birth': json.loads(get_file_from_test_base_dir('payload_birth.json'))}, + {'mariage': json.loads(get_file_from_test_base_dir('payload_mariage.json'))}, + {'death': json.loads(get_file_from_test_base_dir('payload_death.json'))}, ] @@ -83,7 +77,8 @@ def test_demand_creation(app, payload, actesweb): demand_id = response.json['data']['demand_id'] demfile = get_demand_filepath(actesweb, demand_id) assert_file_content_values( - demfile, dict( + demfile, + dict( DEMANDEUR_CIVILITE="Madame", DEMANDEUR_NOM_USAGE="W'hatever?", DEMANDEUR_PRENOMS="Kim Chelsea", @@ -104,15 +99,16 @@ def test_demand_creation(app, payload, actesweb): PERE_PRENOMS="John Oliver", MERE_NOM="Smith", MERE_PRENOM="Kim", - DEMANDE_SEXE="m" - ) + DEMANDE_SEXE="m", + ), ) elif 'mariage' in payload: response = app.post_json(url, params=payload['mariage']) demand_id = response.json['data']['demand_id'] demfile = get_demand_filepath(actesweb, demand_id) assert_file_content_values( - demfile, dict( + demfile, + dict( DEMANDEUR_CIVILITE="Madame", DEMANDEUR_NOM_USAGE="Whatever", DEMANDEUR_NOM="Bar", @@ -121,7 +117,7 @@ def test_demand_creation(app, payload, actesweb): DEMANDEUR_VILLE="Nancy", DEMANDEUR_CP="54001", DEMANDEUR_PAYS="France", - DEMANDEUR_TEL="+33 6 55 44 22 11", + DEMANDEUR_TEL="+33 6 55 44 22 11", DEMANDEUR_ADR="chelsea@whatever.com", DEMANDE_NOM="Whatever", DEMANDE_PRENOMS="Kevin", @@ -141,7 +137,7 @@ def test_demand_creation(app, payload, actesweb): CONJOINT_PERE_PRENOMS="Antonio", CONJOINT_MERE_NOM="Scaramucci", CONJOINT_MERE_PRENOMS="Marguerite", - ) + ), ) else: response = app.post_json(url, params=payload['death']) @@ -155,7 +151,8 @@ def test_demand_creation(app, 
payload, actesweb): # and no others assert not bool(os.stat(demfile).st_mode & stat.S_IRWXO) assert_file_content_values( - demfile, dict( + demfile, + dict( DEMANDEUR_CIVILITE="Madame", DEMANDEUR_NOM_USAGE="Whatever", DEMANDEUR_PRENOMS="Kim Chelsea", @@ -172,6 +169,6 @@ def test_demand_creation(app, payload, actesweb): ACTE="DE", NB="1", LIEU_EVENEMENT="Nancy", - DEMANDE_SEXE="m" - ) + DEMANDE_SEXE="m", + ), ) diff --git a/tests/test_adict.py b/tests/test_adict.py index cd81070c..94a5ff8f 100644 --- a/tests/test_adict.py +++ b/tests/test_adict.py @@ -6,30 +6,36 @@ from passerelle.contrib.adict.models import Adict import utils -FAKE_FEATURE_INFO = json.dumps({ - 'type': 'FeatureCollection', - 'features': [ - { - 'type': 'Feature', - 'properties': { - 'id': 51, - 'type': 'secteur_maternelle', - 'url': None, - 'description': None, - 'nom': 'NEUFELD' - }, - 'geometry': {} - }] -}) +FAKE_FEATURE_INFO = json.dumps( + { + 'type': 'FeatureCollection', + 'features': [ + { + 'type': 'Feature', + 'properties': { + 'id': 51, + 'type': 'secteur_maternelle', + 'url': None, + 'description': None, + 'nom': 'NEUFELD', + }, + 'geometry': {}, + } + ], + } +) @pytest.fixture def connector(db): - return utils.setup_access_rights(Adict.objects.create( - slug='test', - service_root_url='http://adict.example.net/', - api_token='xyz', - sector_type='secteur_maternelle')) + return utils.setup_access_rights( + Adict.objects.create( + slug='test', + service_root_url='http://adict.example.net/', + api_token='xyz', + sector_type='secteur_maternelle', + ) + ) @mock.patch('passerelle.utils.Request.get') diff --git a/tests/test_api.py b/tests/test_api.py index 295955fe..90713dcb 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -38,8 +38,9 @@ def connector(db): ) apiuser = ApiUser.objects.create(username='me', keytype='API', key=API_KEY) obj_type = ContentType.objects.get_for_model(OVHSMSGateway) - AccessRight.objects.create(codename='can_access', apiuser=apiuser, - resource_type=obj_type, 
resource_pk=connector.pk) + AccessRight.objects.create( + codename='can_access', apiuser=apiuser, resource_type=obj_type, resource_pk=connector.pk + ) return connector diff --git a/tests/test_api_access.py b/tests/test_api_access.py index 2539c973..67ee07f0 100644 --- a/tests/test_api_access.py +++ b/tests/test_api_access.py @@ -19,28 +19,28 @@ pytestmark = pytest.mark.django_db @pytest.fixture def oxyd(db): - return OxydSMSGateway.objects.create(title='eservices', - slug='eservices', - username='user', - description='oxyd', - password='secret') + return OxydSMSGateway.objects.create( + title='eservices', slug='eservices', username='user', description='oxyd', password='secret' + ) + def test_anonymous_access(app, oxyd): - endpoint_url = reverse('generic-endpoint', - kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'}) + endpoint_url = reverse( + 'generic-endpoint', kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'} + ) resp = app.post_json(endpoint_url, params={}, status=403) assert resp.json['err'] == 1 assert resp.json['err_class'] == 'django.core.exceptions.PermissionDenied' - api = ApiUser.objects.create(username='public', - fullname='public', - description='access for all', - keytype='', key='') + api = ApiUser.objects.create( + username='public', fullname='public', description='access for all', keytype='', key='' + ) obj_type = ContentType.objects.get_for_model(OxydSMSGateway) - AccessRight.objects.create(codename='can_send_messages', - apiuser=api, - resource_type=obj_type, - resource_pk=oxyd.pk, + AccessRight.objects.create( + codename='can_send_messages', + apiuser=api, + resource_type=obj_type, + resource_pk=oxyd.pk, ) resp = app.post_json(endpoint_url, params={}, status=400) # for empty payload the connector returns an APIError with @@ -50,20 +50,20 @@ def test_anonymous_access(app, oxyd): def test_access_with_signature(app, oxyd): - api = ApiUser.objects.create(username='eservices', - fullname='Eservices User', - 
description='eservices', - keytype='SIGN', - key='12345') + api = ApiUser.objects.create( + username='eservices', fullname='Eservices User', description='eservices', keytype='SIGN', key='12345' + ) obj_type = ContentType.objects.get_for_model(OxydSMSGateway) - AccessRight.objects.create(codename='can_send_messages', - apiuser=api, - resource_type=obj_type, - resource_pk=oxyd.pk, + AccessRight.objects.create( + codename='can_send_messages', + apiuser=api, + resource_type=obj_type, + resource_pk=oxyd.pk, + ) + endpoint_url = reverse( + 'generic-endpoint', kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'} ) - endpoint_url = reverse('generic-endpoint', - kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'}) url = signature.sign_url(endpoint_url + '?orig=eservices', '12345') # for empty payload the connector returns an APIError with # {"err_desc": "'message' is a required property"} @@ -99,21 +99,21 @@ def test_access_with_signature(app, oxyd): def test_access_http_auth(app, oxyd): username = 'apiuser' password = '12345' - api = ApiUser.objects.create(username=username, - fullname='Api User', - description='api', - keytype='SIGN', - key=password) + api = ApiUser.objects.create( + username=username, fullname='Api User', description='api', keytype='SIGN', key=password + ) obj_type = ContentType.objects.get_for_model(OxydSMSGateway) - AccessRight.objects.create(codename='can_send_messages', - apiuser=api, - resource_type=obj_type, - resource_pk=oxyd.pk, + AccessRight.objects.create( + codename='can_send_messages', + apiuser=api, + resource_type=obj_type, + resource_pk=oxyd.pk, ) app.authorization = ('Basic', (username, password)) - endpoint_url = reverse('generic-endpoint', - kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'}) + endpoint_url = reverse( + 'generic-endpoint', kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'} + ) resp = app.post_json(endpoint_url, params={}, status=400) assert 
resp.json['err'] == 1 assert resp.json['err_desc'] == "'message' is a required property" @@ -121,43 +121,43 @@ def test_access_http_auth(app, oxyd): def test_access_apikey(app, oxyd): password = 'apiuser_12345' - api = ApiUser.objects.create(username='apiuser', - fullname='Api User', - description='api', - keytype='API', - key=password) + api = ApiUser.objects.create( + username='apiuser', fullname='Api User', description='api', keytype='API', key=password + ) obj_type = ContentType.objects.get_for_model(OxydSMSGateway) - AccessRight.objects.create(codename='can_send_messages', - apiuser=api, - resource_type=obj_type, - resource_pk=oxyd.pk, + AccessRight.objects.create( + codename='can_send_messages', + apiuser=api, + resource_type=obj_type, + resource_pk=oxyd.pk, ) params = {'message': 'test'} - endpoint_url = reverse('generic-endpoint', - kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'}) - resp = app.post_json(endpoint_url + '?apikey=' + password , params=params, status=400) + endpoint_url = reverse( + 'generic-endpoint', kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'} + ) + resp = app.post_json(endpoint_url + '?apikey=' + password, params=params, status=400) resp.json['err'] == 1 assert resp.json['err_desc'] == "'from' is a required property" - resp = app.post_json(endpoint_url + '?apikey=' + password[:3] , params=params, status=403) + resp = app.post_json(endpoint_url + '?apikey=' + password[:3], params=params, status=403) resp.json['err'] == 1 assert resp.json['err_class'] == 'django.core.exceptions.PermissionDenied' def test_access_apiuser_with_no_key(app, oxyd): - api = ApiUser.objects.create(username='apiuser', - fullname='Api User', - description='api') + api = ApiUser.objects.create(username='apiuser', fullname='Api User', description='api') obj_type = ContentType.objects.get_for_model(OxydSMSGateway) - AccessRight.objects.create(codename='can_send_messages', - apiuser=api, - resource_type=obj_type, - 
resource_pk=oxyd.pk, + AccessRight.objects.create( + codename='can_send_messages', + apiuser=api, + resource_type=obj_type, + resource_pk=oxyd.pk, ) params = {'message': 'test', 'from': 'test api'} - endpoint_url = reverse('generic-endpoint', - kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'}) + endpoint_url = reverse( + 'generic-endpoint', kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'} + ) resp = app.post_json(endpoint_url, params=params, status=400) assert resp.json['err'] == 1 assert resp.json['err_desc'] == "'to' is a required property" @@ -165,28 +165,27 @@ def test_access_apiuser_with_no_key(app, oxyd): def test_access_apiuser_with_ip_restriction(app, oxyd): authorized_ip = '176.31.123.109' - api = ApiUser.objects.create(username='apiuser', - fullname='Api User', - description='api', - ipsource=authorized_ip + api = ApiUser.objects.create( + username='apiuser', fullname='Api User', description='api', ipsource=authorized_ip ) obj_type = ContentType.objects.get_for_model(OxydSMSGateway) - AccessRight.objects.create(codename='can_send_messages', - apiuser=api, - resource_type=obj_type, - resource_pk=oxyd.pk, + AccessRight.objects.create( + codename='can_send_messages', + apiuser=api, + resource_type=obj_type, + resource_pk=oxyd.pk, ) - endpoint_url = reverse('generic-endpoint', - kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'}) - resp = app.post_json(endpoint_url, params={}, extra_environ={'REMOTE_ADDR': '127.0.0.1'}, - status=403) + endpoint_url = reverse( + 'generic-endpoint', kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'} + ) + resp = app.post_json(endpoint_url, params={}, extra_environ={'REMOTE_ADDR': '127.0.0.1'}, status=403) assert resp.json['err'] == 1 assert resp.json['err_class'] == 'django.core.exceptions.PermissionDenied' - endpoint_url = reverse('generic-endpoint', - kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'}) - resp = app.post_json(endpoint_url, 
params={}, - extra_environ={'REMOTE_ADDR': authorized_ip}, status=400) + endpoint_url = reverse( + 'generic-endpoint', kwargs={'connector': 'oxyd', 'slug': oxyd.slug, 'endpoint': 'send'} + ) + resp = app.post_json(endpoint_url, params={}, extra_environ={'REMOTE_ADDR': authorized_ip}, status=400) assert resp.json['err'] == 1 assert resp.json['err_desc'] == "'message' is a required property" diff --git a/tests/test_api_entreprise.py b/tests/test_api_entreprise.py index ade8c3e5..7d3055d2 100644 --- a/tests/test_api_entreprise.py +++ b/tests/test_api_entreprise.py @@ -29,211 +29,191 @@ from passerelle.apps.api_entreprise.models import APIEntreprise from utils import make_resource, FakedResponse ETABLISSEMENTS_RESPONSE = { - "etablissement": { - "siege_social": True, - "siret": "41816609600051", - "naf": "6202A", - "libelle_naf": "Conseil en systèmes et logiciels informatiques", - "date_mise_a_jour": 1449183600, - "tranche_effectif_salarie_etablissement": { - "de": 200, - "a": 249, - "code": "31", - "date_reference": "2014", - "intitule": "200 à 249 salariés" + "etablissement": { + "siege_social": True, + "siret": "41816609600051", + "naf": "6202A", + "libelle_naf": "Conseil en systèmes et logiciels informatiques", + "date_mise_a_jour": 1449183600, + "tranche_effectif_salarie_etablissement": { + "de": 200, + "a": 249, + "code": "31", + "date_reference": "2014", + "intitule": "200 à 249 salariés", + }, + "date_creation_etablissement": 1108594800, + "region_implantation": {"code": "11", "value": "Île-de-France"}, + "commune_implantation": {"code": "75108", "value": "PARIS 8"}, + "adresse": { + "l1": "OCTO TECHNOLOGY", + "l4": "50 AVENUE DES CHAMPS ELYSEES", + "l6": "75008 PARIS", + "l7": "FRANCE", + "numero_voie": "50", + "type_voie": "AV", + "nom_voie": "DES CHAMPS ELYSEES", + "code_postal": "75008", + "localite": "PARIS 8", + "code_insee_localite": "75108", + }, + "etat_administratif": {"value": "F", "date_fermeture": 1315173600}, }, - "date_creation_etablissement": 
1108594800, - "region_implantation": { - "code": "11", - "value": "Île-de-France" - }, - "commune_implantation": { - "code": "75108", - "value": "PARIS 8" - }, - "adresse": { - "l1": "OCTO TECHNOLOGY", - "l4": "50 AVENUE DES CHAMPS ELYSEES", - "l6": "75008 PARIS", - "l7": "FRANCE", - "numero_voie": "50", - "type_voie": "AV", - "nom_voie": "DES CHAMPS ELYSEES", - "code_postal": "75008", - "localite": "PARIS 8", - "code_insee_localite": "75108", - }, - "etat_administratif": { - "value": "F", - "date_fermeture": 1315173600 - } - }, - "gateway_error": False + "gateway_error": False, } ENTREPRISES_RESPONSE = { - "entreprise": { - "siren": "418166096", - "capital_social": 459356, - "numero_tva_intracommunautaire": "FR16418166096", - "forme_juridique": "SA à directoire (s.a.i.)", - "forme_juridique_code": "5699", - "nom_commercial": "OCTO-TECHNOLOGY", - "procedure_collective": False, - "naf_entreprise": "6202A", - "libelle_naf_entreprise": "Conseil en systèmes et logiciels informatiques", - "raison_sociale": "OCTO-TECHNOLOGY", - "siret_siege_social": "41816609600051", - "code_effectif_entreprise": "31", - "date_creation": 891381600, - "categorie_entreprise": "PME", - "tranche_effectif_salarie_entreprise": { - "de": 200, - "a": 249, - "code": "31", - "date_reference": "2014", - "intitule": "200 à 249 salariés" + "entreprise": { + "siren": "418166096", + "capital_social": 459356, + "numero_tva_intracommunautaire": "FR16418166096", + "forme_juridique": "SA à directoire (s.a.i.)", + "forme_juridique_code": "5699", + "nom_commercial": "OCTO-TECHNOLOGY", + "procedure_collective": False, + "naf_entreprise": "6202A", + "libelle_naf_entreprise": "Conseil en systèmes et logiciels informatiques", + "raison_sociale": "OCTO-TECHNOLOGY", + "siret_siege_social": "41816609600051", + "code_effectif_entreprise": "31", + "date_creation": 891381600, + "categorie_entreprise": "PME", + "tranche_effectif_salarie_entreprise": { + "de": 200, + "a": 249, + "code": "31", + "date_reference": "2014", 
+ "intitule": "200 à 249 salariés", + }, + "mandataires_sociaux": [ + { + "nom": "HISQUIN", + "prenom": "FRANCOIS", + "fonction": "PRESIDENT DU DIRECTOIRE", + "dirigeant": True, + "date_naissance": "1965-01-27", + "raison_sociale": "", + "identifiant": "", + "type": "PP", + }, + { + "nom": "", + "prenom": "", + "fonction": "COMMISSAIRE AUX COMPTES SUPPLEANT", + "dirigeant": True, + "date_naissance": "", + "date_naissance_timestamp": 0, + "raison_sociale": "BCRH & ASSOCIES - SOCIETE A RESPONSABILITE LIMITEE A ASSOCIE UNIQUE", + "identifiant": "490092574", + "type": "PM", + }, + ], + "etat_administratif": { + "value": "C", # A (actif) ou C (cessé) + "date_cessation": 1315173600, # null quand actif (A), un timestamp (un entier) quand cessé (C ) + }, }, - "mandataires_sociaux": [{ - "nom": "HISQUIN", - "prenom": "FRANCOIS", - "fonction": "PRESIDENT DU DIRECTOIRE", - "dirigeant": True, - "date_naissance": "1965-01-27", - "raison_sociale": "", - "identifiant": "", - "type": "PP" - }, { - "nom": "", - "prenom": "", - "fonction": "COMMISSAIRE AUX COMPTES SUPPLEANT", - "dirigeant": True, - "date_naissance": "", - "date_naissance_timestamp": 0, - "raison_sociale": "BCRH & ASSOCIES - SOCIETE A RESPONSABILITE LIMITEE A ASSOCIE UNIQUE", - "identifiant": "490092574", - "type": "PM" - } - ], - "etat_administratif": { - "value": "C", # A (actif) ou C (cessé) - "date_cessation": 1315173600 # null quand actif (A), un timestamp (un entier) quand cessé (C ) - } - }, - "etablissement_siege": { - "siege_social": True, - "siret": "41816609600051", - "naf": "6202A", - "libelle_naf": "Conseil en systèmes et logiciels informatiques", - "date_mise_a_jour": 1449183600, - "tranche_effectif_salarie_etablissement": { - "de": 200, - "a": 249, - "code": "31", - "date_reference": "2014", - "intitule": "200 à 249 salariés" + "etablissement_siege": { + "siege_social": True, + "siret": "41816609600051", + "naf": "6202A", + "libelle_naf": "Conseil en systèmes et logiciels informatiques", + 
"date_mise_a_jour": 1449183600, + "tranche_effectif_salarie_etablissement": { + "de": 200, + "a": 249, + "code": "31", + "date_reference": "2014", + "intitule": "200 à 249 salariés", + }, + "date_creation_etablissement": 1108594800, + "region_implantation": {"code": "11", "value": "Île-de-France"}, + "commune_implantation": {"code": "75108", "value": "PARIS 8"}, + "adresse": { + "l1": "OCTO TECHNOLOGY", + "l4": "50 AVENUE DES CHAMPS ELYSEES", + "l6": "75008 PARIS", + "l7": "FRANCE", + "numero_voie": "50", + "type_voie": "AV", + "nom_voie": "DES CHAMPS ELYSEES", + "code_postal": "75008", + "localite": "PARIS 8", + "code_insee_localite": "75108", + }, + "etat_administratif": {"value": "F", "date_fermeture": 1315173600}, }, - "date_creation_etablissement": 1108594800, - "region_implantation": { - "code": "11", - "value": "Île-de-France" - }, - "commune_implantation": { - "code": "75108", - "value": "PARIS 8" - }, - "adresse": { - "l1": "OCTO TECHNOLOGY", - "l4": "50 AVENUE DES CHAMPS ELYSEES", - "l6": "75008 PARIS", - "l7": "FRANCE", - "numero_voie": "50", - "type_voie": "AV", - "nom_voie": "DES CHAMPS ELYSEES", - "code_postal": "75008", - "localite": "PARIS 8", - "code_insee_localite": "75108", - }, - "etat_administratif": { - "value": "F", - "date_fermeture": 1315173600 - } - }, - "gateway_error": False + "gateway_error": False, } EXTRAITS_RCS_RESPONSE = { - "siren": "418166096", - "date_immatriculation": "1998-03-27", - "date_immatriculation_timestamp": 890953200, - "date_extrait": "21 AVRIL 2017", - "observations": [ - { - "date": "2000-02-23", - "date_timestamp": 951260400, - "numero": "12197", - "libelle": " LA SOCIETE NE CONSERVE AUCUNE ACTIVITE A SON ANCIEN SIEGE " - } - ] + "siren": "418166096", + "date_immatriculation": "1998-03-27", + "date_immatriculation_timestamp": 890953200, + "date_extrait": "21 AVRIL 2017", + "observations": [ + { + "date": "2000-02-23", + "date_timestamp": 951260400, + "numero": "12197", + "libelle": " LA SOCIETE NE CONSERVE AUCUNE 
ACTIVITE A SON ANCIEN SIEGE ", + } + ], } ASSOCIATIONS_RESPONSE = { - "association" : { - "id": "W751135389", - "titre": "ALLIANCE DU COEUR: UNION NATIONALE DES FEDERATIONS ET ASSOCIATIONS DE MALADES CARDIOVASCULAIRES", - "objet": "information, soutien, solidarité et accompagnement psycho médico social des personnes malades cardiovasculaires et de leurs proches...", - "siret": "42135938100025", - "siret_siege_social": "42135938100033", - "date_creation": "1993-02-11", - "date_declaration": "2013-06-28", - "date_publication": "1993-03-03", - "adresse_siege": { - "numero_voie": "10", - "type_voie": "RUE", - "libelle_voie": "Lebouis", - "code_insee": "75120", - "code_postal": ["75014"], - "commune": "Paris" - }, - "groupement": "Simple", - "mise_a_jour": "2013-06-28" - } + "association": { + "id": "W751135389", + "titre": "ALLIANCE DU COEUR: UNION NATIONALE DES FEDERATIONS ET ASSOCIATIONS DE MALADES CARDIOVASCULAIRES", + "objet": "information, soutien, solidarité et accompagnement psycho médico social des personnes malades cardiovasculaires et de leurs proches...", + "siret": "42135938100025", + "siret_siege_social": "42135938100033", + "date_creation": "1993-02-11", + "date_declaration": "2013-06-28", + "date_publication": "1993-03-03", + "adresse_siege": { + "numero_voie": "10", + "type_voie": "RUE", + "libelle_voie": "Lebouis", + "code_insee": "75120", + "code_postal": ["75014"], + "commune": "Paris", + }, + "groupement": "Simple", + "mise_a_jour": "2013-06-28", + } } DOCUMENTS_ASSOCIATION_RESPONSE = { - "nombre_documents": 2, - "documents": [ - { - "type": "Statuts", - "url": "https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/attestation_document_association.pdf", - "timestamp": "1500660325" - }, - { - "type": "Récépissé", - "url": "https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/recepisse_association.pdf", - "timestamp": "1500667325" - }, - { - "timestamp": "1337158058", - "url": 
"https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/attestation_document_association.pdf", - "type": "Statuts" - }, - ] + "nombre_documents": 2, + "documents": [ + { + "type": "Statuts", + "url": "https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/attestation_document_association.pdf", + "timestamp": "1500660325", + }, + { + "type": "Récépissé", + "url": "https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/recepisse_association.pdf", + "timestamp": "1500667325", + }, + { + "timestamp": "1337158058", + "url": "https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/attestation_document_association.pdf", + "type": "Statuts", + }, + ], } -EFFECTIFS_ANNUELS_ACOSS_COVID = { - "siren": "418166096", - "annee": "2019", - "effectifs_annuels": 100.5 -} +EFFECTIFS_ANNUELS_ACOSS_COVID = {"siren": "418166096", "annee": "2019", "effectifs_annuels": 100.5} ENTREPRISE_EFFECTIFS_MENSUELS_ACOSS_COVID = { "siren": "418166096", "annee": "2019", "mois": "02", - "effectifs_mensuels": 100.5 + "effectifs_mensuels": 100.5, } @@ -241,7 +221,7 @@ ETABLISSEMENT_EFFECTIFS_MENSUELS_ACOSS_COVID = { "siret": "41816609600051", "annee": "2019", "mois": "02", - "effectifs_mensuels": 100.5 + "effectifs_mensuels": 100.5, } @@ -250,18 +230,18 @@ EXERCICES_RESPONSE = { { "ca": "648374448", "date_fin_exercice": "2016-12-31T00:00:00+01:00", - "date_fin_exercice_timestamp": 1483138800 + "date_fin_exercice_timestamp": 1483138800, }, { "ca": "491463386", "date_fin_exercice": "2015-12-31T00:00:00+01:00", - "date_fin_exercice_timestamp": 1451516400 + "date_fin_exercice_timestamp": 1451516400, }, { "ca": "473899061", "date_fin_exercice": "2014-12-31T00:00:00+01:00", - "date_fin_exercice_timestamp": 1419980400 - } + "date_fin_exercice_timestamp": 1419980400, + }, ] } @@ -270,60 +250,53 @@ DOCUMENT_ASSOCIATION_RESPONSE = "binary content" REQUEST_PARAMS = {'context': 'MSP', 'object': 'demand', 'recipient': 'siret'} 
-@urlmatch(netloc='^entreprise.api.gouv.fr$', - path='^/v2/etablissements/') + +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/etablissements/') def api_entreprise_etablissements(url, request): return response(200, ETABLISSEMENTS_RESPONSE, request=request) -@urlmatch(netloc='^entreprise.api.gouv.fr$', - path='^/v2/entreprises/') +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/entreprises/') def api_entreprise_entreprises(url, request): return response(200, ENTREPRISES_RESPONSE, request=request) -@urlmatch(netloc='^entreprise.api.gouv.fr$', - path='^/v2/associations/') +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/associations/') def api_entreprise_associations(url, request): return response(200, ASSOCIATIONS_RESPONSE, request=request) -@urlmatch(netloc='^entreprise.api.gouv.fr$', - path='^/v2/extraits_rcs_infogreffe/') +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/extraits_rcs_infogreffe/') def api_entreprise_extraits_rcs(url, request): return response(200, EXTRAITS_RCS_RESPONSE, request=request) -@urlmatch(netloc='^entreprise.api.gouv.fr$', - path='^/v2/documents_associations/') + +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/documents_associations/') def api_entreprise_documents_associations(url, request): return response(200, DOCUMENTS_ASSOCIATION_RESPONSE, request=request) -@urlmatch(netloc='^apientreprise.fr$', - path='^/attestations/') + +@urlmatch(netloc='^apientreprise.fr$', path='^/attestations/') def api_entreprise_document_association(url, request): return response(200, DOCUMENT_ASSOCIATION_RESPONSE, request=request) -@urlmatch(netloc='^entreprise.api.gouv.fr$', - path='^/v2/effectifs_annuels_acoss_covid/') +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/effectifs_annuels_acoss_covid/') def effectifs_annuels_acoss_covid(url, request): return response(200, EFFECTIFS_ANNUELS_ACOSS_COVID, request=request) -@urlmatch(netloc='^entreprise.api.gouv.fr$', - 
path='^/v2/effectifs_mensuels_acoss_covid/.*/entreprise') +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/effectifs_mensuels_acoss_covid/.*/entreprise') def entreprise_effectifs_mensuels_acoss_covid(url, request): return response(200, ENTREPRISE_EFFECTIFS_MENSUELS_ACOSS_COVID, request=request) -@urlmatch(netloc='^entreprise.api.gouv.fr$', - path='^/v2/effectifs_mensuels_acoss_covid/.*/etablissement') +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/effectifs_mensuels_acoss_covid/.*/etablissement') def etablisssment_effectifs_mensuels_acoss_covid(url, request): return response(200, ETABLISSEMENT_EFFECTIFS_MENSUELS_ACOSS_COVID, request=request) -@urlmatch(netloc='^entreprise.api.gouv.fr$', - path='^/v2/exercices') +@urlmatch(netloc='^entreprise.api.gouv.fr$', path='^/v2/exercices') def api_entreprise_exercices(url, request): return response(200, EXERCICES_RESPONSE, request=request) @@ -332,10 +305,12 @@ def api_entreprise_exercices(url, request): def api_entreprise_error_500(url, request): return response(500, 'bad error happened', request=request) + @urlmatch(netloc='^entreprise.api.gouv.fr$') def api_entreprise_connection_error(url, request): raise requests.RequestException('connection timed-out') + @urlmatch(netloc='^entreprise.api.gouv.fr$') def api_entreprise_error_not_json(url, request): return response(200, 'simple text', request=request) @@ -343,20 +318,24 @@ def api_entreprise_error_not_json(url, request): @urlmatch(netloc='^entreprise.api.gouv.fr$') def api_entreprise_error_not_found(url, request): - return response(404, { - 'error': 'not_found', - 'message': u'Page not found' - }, request=request) + return response(404, {'error': 'not_found', 'message': u'Page not found'}, request=request) @pytest.yield_fixture def mock_api_entreprise(): with HTTMock( - api_entreprise_etablissements, api_entreprise_entreprises, api_entreprise_associations, - api_entreprise_extraits_rcs, api_entreprise_associations, api_entreprise_documents_associations, - 
api_entreprise_document_association, effectifs_annuels_acoss_covid, - entreprise_effectifs_mensuels_acoss_covid, etablisssment_effectifs_mensuels_acoss_covid, - api_entreprise_exercices): + api_entreprise_etablissements, + api_entreprise_entreprises, + api_entreprise_associations, + api_entreprise_extraits_rcs, + api_entreprise_associations, + api_entreprise_documents_associations, + api_entreprise_document_association, + effectifs_annuels_acoss_covid, + entreprise_effectifs_mensuels_acoss_covid, + etablisssment_effectifs_mensuels_acoss_covid, + api_entreprise_exercices, + ): yield None @@ -368,7 +347,8 @@ def resource(db): title='API Entreprise', description='API Entreprise', token='83c68bf0b6013c4daf3f8213f7212aa5', - recipient='recipient') + recipient='recipient', + ) @mock.patch('passerelle.utils.Request.get') @@ -388,8 +368,7 @@ def test_endpoint_with_no_params(mocked_get, app, resource): def test_entreprises_endpoint(app, resource, mock_api_entreprise): - response = app.get('/api-entreprise/test/entreprises/443170139/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/entreprises/443170139/', params=REQUEST_PARAMS) data = response.json['data'] assert data['entreprise']['categorie_entreprise'] == 'PME' assert data['entreprise']['numero_tva_intracommunautaire'] == 'FR16418166096' @@ -424,8 +403,7 @@ def test_entreprises_endpoint_include_private(mocked_get, app, resource, mock_ap def test_etablissements_endpoint(app, resource, mock_api_entreprise): - response = app.get('/api-entreprise/test/etablissements/44317013900036/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/etablissements/44317013900036/', params=REQUEST_PARAMS) assert 'data' in response.json data = response.json['data'] @@ -442,8 +420,7 @@ def test_etablissements_endpoint(app, resource, mock_api_entreprise): def test_associations_endpoint(app, resource, mock_api_entreprise): - response = app.get('/api-entreprise/test/associations/443170139/', - 
params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/associations/443170139/', params=REQUEST_PARAMS) assert 'data' in response.json data = response.json['data'] @@ -460,8 +437,7 @@ def test_associations_endpoint(app, resource, mock_api_entreprise): def test_documents_associations_endpoint(app, resource, mock_api_entreprise): - response = app.get('/api-entreprise/test/documents_associations/443170139/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/documents_associations/443170139/', params=REQUEST_PARAMS) assert 'data' in response.json data = response.json['data'] assert len(data) == 3 @@ -476,21 +452,18 @@ def test_documents_associations_endpoint(app, resource, mock_api_entreprise): def test_associations_last_document_of_type_endpoint(app, resource, mock_api_entreprise): params = REQUEST_PARAMS params['document_type'] = 'Statuts' - response = app.get('/api-entreprise/test/document_association/443170139/get-last/', - params=params) + response = app.get('/api-entreprise/test/document_association/443170139/get-last/', params=params) assert 'data' in response.json data = response.json['data'] assert data['timestamp'] == '1500660325' params['document_type'] = 'Liste des dirigeants' - response = app.get('/api-entreprise/test/document_association/443170139/get-last/', - params=params) + response = app.get('/api-entreprise/test/document_association/443170139/get-last/', params=params) assert not response.json['data'] def test_extraits_rcs(app, resource, mock_api_entreprise, freezer): - response = app.get('/api-entreprise/test/extraits_rcs/443170139/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/extraits_rcs/443170139/', params=REQUEST_PARAMS) assert 'data' in response.json data = response.json['data'] @@ -502,16 +475,15 @@ def test_extraits_rcs(app, resource, mock_api_entreprise, freezer): def test_document_association(app, resource, mock_api_entreprise, freezer): - response = 
app.get('/api-entreprise/test/documents_associations/443170139/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/documents_associations/443170139/', params=REQUEST_PARAMS) assert 'data' in response.json data = response.json['data'] assert len(data) == 3 document = data[0] assert 'url' in document - resp = app.get(document['url'], - params={'context': 'MSP', 'object': 'demand', 'recipient': 'siret'}, - status=200) + resp = app.get( + document['url'], params={'context': 'MSP', 'object': 'demand', 'recipient': 'siret'}, status=200 + ) # try to get document with wrong signature url = document['url'] wrong_url = document['url'] + "wrong/" @@ -523,8 +495,7 @@ def test_document_association(app, resource, mock_api_entreprise, freezer): def test_effectifs_annuels_acoss_covid(app, resource, mock_api_entreprise, freezer): - response = app.get('/api-entreprise/test/effectifs_annuels_acoss_covid/418166096/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/effectifs_annuels_acoss_covid/418166096/', params=REQUEST_PARAMS) data = response.json['data'] assert data['siren'] == '418166096' assert data['annee'] == '2019' @@ -532,8 +503,10 @@ def test_effectifs_annuels_acoss_covid(app, resource, mock_api_entreprise, freez def test_entreprise_effectifs_mensuels_acoss_covid(app, resource, mock_api_entreprise, freezer): - response = app.get('/api-entreprise/test/entreprise_effectifs_mensuels_acoss_covid/2019/02/418166096/', - params=REQUEST_PARAMS) + response = app.get( + '/api-entreprise/test/entreprise_effectifs_mensuels_acoss_covid/2019/02/418166096/', + params=REQUEST_PARAMS, + ) data = response.json['data'] assert data['siren'] == '418166096' assert data['annee'] == '2019' @@ -542,8 +515,10 @@ def test_entreprise_effectifs_mensuels_acoss_covid(app, resource, mock_api_entre def test_etablissment_effectifs_mensuels_acoss_covid(app, resource, mock_api_entreprise, freezer): - response = 
app.get('/api-entreprise/test/etablissement_effectifs_mensuels_acoss_covid/2019/02/418166096/', - params=REQUEST_PARAMS) + response = app.get( + '/api-entreprise/test/etablissement_effectifs_mensuels_acoss_covid/2019/02/418166096/', + params=REQUEST_PARAMS, + ) data = response.json['data'] assert data['siret'] == '41816609600051' assert data['annee'] == '2019' @@ -564,8 +539,7 @@ def test_exercices(app, resource, mock_api_entreprise, freezer): def test_error_500(app, resource, mock_api_entreprise): with HTTMock(api_entreprise_error_500): - response = app.get('/api-entreprise/test/entreprises/443170139/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/entreprises/443170139/', params=REQUEST_PARAMS) assert response.status_code == 200 assert response.json['err'] == 1 assert response.json['data']['status_code'] == 500 @@ -574,17 +548,18 @@ def test_error_500(app, resource, mock_api_entreprise): def test_no_json_error(app, resource, mock_api_entreprise): with HTTMock(api_entreprise_error_not_json): - response = app.get('/api-entreprise/test/entreprises/443170139/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/entreprises/443170139/', params=REQUEST_PARAMS) assert response.status_code == 200 assert response.json['err'] == 1 - assert response.json['err_desc'] == "API-entreprise returned non-JSON content with status 200: simple text" + assert ( + response.json['err_desc'] + == "API-entreprise returned non-JSON content with status 200: simple text" + ) def test_error_404(app, resource, mock_api_entreprise): with HTTMock(api_entreprise_error_not_found): - response = app.get('/api-entreprise/test/entreprises/443170139/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/entreprises/443170139/', params=REQUEST_PARAMS) assert response.status_code == 200 assert response.json['err'] == 1 assert response.json['err_desc'] == 'Page not found' @@ -592,8 +567,7 @@ def test_error_404(app, resource, mock_api_entreprise): def 
test_connection_error(app, resource, mock_api_entreprise): with HTTMock(api_entreprise_connection_error): - response = app.get('/api-entreprise/test/entreprises/443170139/', - params=REQUEST_PARAMS) + response = app.get('/api-entreprise/test/entreprises/443170139/', params=REQUEST_PARAMS) assert response.status_code == 200 assert response.json['err'] == 1 assert response.json['err_desc'] == 'API-entreprise connection error: connection timed-out' diff --git a/tests/test_api_particulier.py b/tests/test_api_particulier.py index 8cac8a5b..e08e0bb7 100644 --- a/tests/test_api_particulier.py +++ b/tests/test_api_particulier.py @@ -37,18 +37,15 @@ SVAIR_RESPONSE = { "nom": "Martin", "nomNaissance": "Martin", "prenoms": "Pierre", - "dateNaissance": "22/03/1985" + "dateNaissance": "22/03/1985", }, "declarant2": { "nom": "Martin", "nomNaissance": "Honore", "prenoms": "Marie", - "dateNaissance": "03/04/1986" - }, - "foyerFiscal": { - "annee": 2015, - "adresse": "12 rue Balzac 75008 Paris" + "dateNaissance": "03/04/1986", }, + "foyerFiscal": {"annee": 2015, "adresse": "12 rue Balzac 75008 Paris"}, "dateRecouvrement": "10/10/2015", "dateEtablissement": "08/07/2015", "nombreParts": 2, @@ -60,7 +57,7 @@ SVAIR_RESPONSE = { "montantImpot": 2165, "revenuFiscalReference": 29880, "anneeImpots": "2015", - "anneeRevenus": "2014" + "anneeRevenus": "2014", } CAF_FAMILLE = { @@ -69,40 +66,25 @@ CAF_FAMILLE = { "complementIdentiteGeo": "ESCALIER B", "identite": "Madame MARIE DUPONT", "numeroRue": "123 RUE BIDON", - "pays": "FRANCE" + "pays": "FRANCE", }, "allocataires": [ - { - "dateDeNaissance": "12111971", - "nomPrenom": "MARIE DUPONT", - "sexe": "F" - }, - { - "dateDeNaissance": "18101969", - "nomPrenom": "JEAN DUPONT", - "sexe": "M" - } + {"dateDeNaissance": "12111971", "nomPrenom": "MARIE DUPONT", "sexe": "F"}, + {"dateDeNaissance": "18101969", "nomPrenom": "JEAN DUPONT", "sexe": "M"}, ], "annee": 2017, - "enfants": [ - { - "dateDeNaissance": "11122016", - "nomPrenom": "LUCIE DUPONT", 
- "sexe": "F" - } - ], + "enfants": [{"dateDeNaissance": "11122016", "nomPrenom": "LUCIE DUPONT", "sexe": "F"}], "mois": 4, - "quotientFamilial": 1754 + "quotientFamilial": 1754, } -@urlmatch(netloc=r'^particulier.*\.api\.gouv\.fr$', - path=r'^/api/v2/avis-imposition$') +@urlmatch(netloc=r'^particulier.*\.api\.gouv\.fr$', path=r'^/api/v2/avis-imposition$') def api_particulier_v2_avis_imposition(url, request): return response(200, SVAIR_RESPONSE, request=request) -@urlmatch(netloc=r'^particulier.*\.api\.gouv\.fr$', - path=r'^/api/v2/composition-familiale$') + +@urlmatch(netloc=r'^particulier.*\.api\.gouv\.fr$', path=r'^/api/v2/composition-familiale$') def api_particulier_v2_situation_familiale(url, request): return response(200, CAF_FAMILLE, request=request) @@ -124,26 +106,32 @@ def api_particulier_error_not_json(url, request): @urlmatch(netloc=r'^particulier.*\.api\.gouv\.fr$') def api_particulier_error_not_found(url, request): - return response(404, { - 'error': 'not_found', - 'message': u'Les paramètres fournis sont incorrects ou ne correspondent pas à un avis' - }, request=request) + return response( + 404, + { + 'error': 'not_found', + 'message': u'Les paramètres fournis sont incorrects ou ne correspondent pas à un avis', + }, + request=request, + ) @urlmatch(netloc=r'^particulier.*\.api\.gouv\.fr$') def api_particulier_error_not_found_caf(url, request): - return response(404, { - 'error': 'not_found', - 'message': 'Dossier allocataire inexistant. Le document ne peut être édité.' - }, request=request) + return response( + 404, + {'error': 'not_found', 'message': 'Dossier allocataire inexistant. Le document ne peut être édité.'}, + request=request, + ) @urlmatch(netloc=r'^particulier.*\.api\.gouv\.fr$') def api_particulier_error_not_found_deregistrated(url, request): - return response(404, { - 'error': 'not_found', - 'message': 'Dossier radié. Le document ne peut être édité.' 
- }, request=request) + return response( + 404, + {'error': 'not_found', 'message': 'Dossier radié. Le document ne peut être édité.'}, + request=request, + ) @pytest.yield_fixture @@ -160,76 +148,68 @@ def resource(db): title='API Particulier Prod', description='API Particulier Prod', platform='test', - api_key='83c68bf0b6013c4daf3f8213f7212aa5') + api_key='83c68bf0b6013c4daf3f8213f7212aa5', + ) def test_error(app, resource, mock_api_particulier): vector = [ - (['impots_svair', 'avis-imposition'], { - 'numero_fiscal': '1234567890123', - 'reference_avis': '3210987654321', - }), - (['caf_famille', 'situation-familiale'], { - 'code_postal': 12, - 'numero_allocataire': 15 - }), + ( + ['impots_svair', 'avis-imposition'], + { + 'numero_fiscal': '1234567890123', + 'reference_avis': '3210987654321', + }, + ), + (['caf_famille', 'situation-familiale'], {'code_postal': 12, 'numero_allocataire': 15}), ] with HTTMock(api_particulier_error_500): + def do(endpoint, params): - resp = endpoint_get( - '/api-particulier/test/%s' % endpoint, - app, - resource, - endpoint, - params=params) + resp = endpoint_get('/api-particulier/test/%s' % endpoint, app, resource, endpoint, params=params) assert resp.status_code == 200 assert resp.json['err'] == 1 assert resp.json['data']['status_code'] == 500 assert resp.json['data']['code'] == 'non-200' + for endpoints, params in vector: for endpoint in endpoints: do(endpoint, params) with HTTMock(api_particulier_error_not_json): + def do(endpoint, params): - resp = endpoint_get( - '/api-particulier/test/%s' % endpoint, - app, - resource, - endpoint, - params=params) + resp = endpoint_get('/api-particulier/test/%s' % endpoint, app, resource, endpoint, params=params) assert resp.status_code == 200 assert resp.json['err'] == 1 assert 'returned non-JSON content' in resp.json['err_desc'] assert resp.json['data']['code'] == 'non-json' + for endpoints, params in vector: for endpoint in endpoints: do(endpoint, params) with 
HTTMock(api_particulier_error_not_found): + def do(endpoint, params): - resp = endpoint_get( - '/api-particulier/test/%s' % endpoint, - app, - resource, - endpoint, - params=params) + resp = endpoint_get('/api-particulier/test/%s' % endpoint, app, resource, endpoint, params=params) assert resp.status_code == 200 assert resp.json['err'] == 1 assert 'incorrects ou ne correspondent pas' in resp.json['err_desc'] assert resp.json['data']['code'] == 'not-found' + for endpoints, params in vector: for endpoint in endpoints: do(endpoint, params) with HTTMock(api_particulier_connection_error): + def do(endpoint, params): - resp = endpoint_get( - '/api-particulier/test/%s' % endpoint, - app, - resource, - endpoint, - params=params) + resp = endpoint_get('/api-particulier/test/%s' % endpoint, app, resource, endpoint, params=params) assert resp.status_code == 200 assert resp.json['err'] == 1 - assert resp.json['err_desc'] == 'API-particulier platform "test" connection error: connection timed-out' + assert ( + resp.json['err_desc'] + == 'API-particulier platform "test" connection error: connection timed-out' + ) + for endpoints, params in vector: for endpoint in endpoints: do(endpoint, params) @@ -243,7 +223,8 @@ def test_error(app, resource, mock_api_particulier): 'numero_fiscal': ' 1234567890', # too short 'reference_avis': '3210987654321', 'user': 'John Doe', - }) + }, + ) assert resp.status_code == 200 assert resp.json['err'] == 1 assert resp.json['data'] is None @@ -257,7 +238,8 @@ def test_error(app, resource, mock_api_particulier): 'numero_fiscal': '1234567890123', 'reference_avis': '32109876543 ', # too short 'user': 'John Doe', - }) + }, + ) assert resp.status_code == 200 assert resp.json['err'] == 1 assert resp.json['data'] is None @@ -274,7 +256,8 @@ def test_avis_imposition(app, resource, mock_api_particulier): 'numero_fiscal': '1234567890123', 'reference_avis': '3210987654321', 'user': 'John Doe', - }) + }, + ) assert resp.status_code == 200 assert 
resp.json['data']['montantImpot'] == 2165 assert resp.json['err'] == 0 @@ -285,10 +268,11 @@ def test_avis_imposition(app, resource, mock_api_particulier): resource, 'avis-imposition', params={ - 'numero_fiscal': '1234567890123X', # 14 chars : will be cutted + 'numero_fiscal': '1234567890123X', # 14 chars : will be cutted 'reference_avis': '3210987654321X', # idem 'user': 'John Doe', - }) + }, + ) assert resp.status_code == 200 assert resp.json['data']['montantImpot'] == 2165 assert resp.json['err'] == 0 @@ -304,27 +288,36 @@ def test_situation_familiale(app, resource, mock_api_particulier): 'code_postal': '99148', 'numero_allocataire': '000354', 'user': 'John Doe', - }) + }, + ) assert resp.json['data']['adresse']['codePostalVille'] == '12345 CONDAT' def test_detail_page(app, resource, admin_user): login(app) - response = app.get(reverse('view-connector', kwargs={ - 'connector': 'api-particulier', - 'slug': 'test', - })) + response = app.get( + reverse( + 'view-connector', + kwargs={ + 'connector': 'api-particulier', + 'slug': 'test', + }, + ) + ) assert 'API Particulier Prod' in response.text assert 'family allowance' in response.text assert 'fiscal information' in response.text @pytest.mark.parametrize( - 'mock,should_log', [ - (api_particulier_error_not_found, False), (api_particulier_error_500, True), - (api_particulier_error_not_json, True), (api_particulier_error_not_found_caf, False), - (api_particulier_error_not_found_deregistrated, False) - ] + 'mock,should_log', + [ + (api_particulier_error_not_found, False), + (api_particulier_error_500, True), + (api_particulier_error_not_json, True), + (api_particulier_error_not_found_caf, False), + (api_particulier_error_not_found_deregistrated, False), + ], ) def test_api_particulier_dont_log_not_found(app, resource, mock, should_log): with HTTMock(mock): @@ -336,7 +329,8 @@ def test_api_particulier_dont_log_not_found(app, resource, mock, should_log): params={ 'numero_fiscal': '1234567890123', 'reference_avis': 
'3210987654321', - }) + }, + ) logs = ResourceLog.objects.all() assert logs.count() == 3 if should_log: diff --git a/tests/test_arcgis.py b/tests/test_arcgis.py index 17eae5ad..4a14b3a3 100644 --- a/tests/test_arcgis.py +++ b/tests/test_arcgis.py @@ -86,22 +86,16 @@ STATES = '''{ @pytest.fixture def arcgis(): - return ArcGIS.objects.create(slug='test', - base_url='https://arcgis.example.net/') + return ArcGIS.objects.create(slug='test', base_url='https://arcgis.example.net/') def test_arcgis_mapservice_query(app, arcgis): endpoint = utils.generic_endpoint_url('arcgis', 'mapservice-query', slug=arcgis.slug) assert endpoint == '/arcgis/test/mapservice-query' - params = { - 'folder': 'fold', - 'service': 'serv', - 'layer': '1' - } + params = {'folder': 'fold', 'service': 'serv', 'layer': '1'} with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=STATES, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=STATES, status_code=200) resp = app.get(endpoint, params=params, status=403) assert requests_get.call_count == 0 @@ -111,11 +105,14 @@ def test_arcgis_mapservice_query(app, arcgis): # open access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(arcgis) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=arcgis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=arcgis.pk + ) resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 1 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert args['f'] == 'json' assert args['outFields'] == '*' @@ -131,7 +128,9 @@ def 
test_arcgis_mapservice_query(app, arcgis): params['full'] = 'on' resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 2 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert args['f'] == 'json' assert args['outFields'] == '*' @@ -147,7 +146,9 @@ def test_arcgis_mapservice_query(app, arcgis): params['q'] = 'Texas' resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 3 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert args['text'] == 'Texas' assert 'where' not in args @@ -156,7 +157,9 @@ def test_arcgis_mapservice_query(app, arcgis): params['lon'] = '1.12345' resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 4 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert args['geometry'] == '1.12345,9.87654' assert args['geometryType'] == 'esriGeometryPoint' @@ -164,7 +167,9 @@ def test_arcgis_mapservice_query(app, arcgis): del params['lat'] # missing lat, do not search by geometry resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 5 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] 
assert 'geometry' not in args assert 'geometryType' not in args @@ -172,7 +177,9 @@ def test_arcgis_mapservice_query(app, arcgis): params.update({'latmin': '1', 'lonmin': '2', 'latmax': '3', 'lonmax': '4'}) resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 6 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert args['geometry'] == '2.0,1.0,4.0,3.0' assert args['geometryType'] == 'esriGeometryEnvelope' @@ -180,7 +187,9 @@ def test_arcgis_mapservice_query(app, arcgis): del params['latmin'] # incomplete box, do not search by geometry resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 7 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert 'geometry' not in args assert 'geometryType' not in args @@ -190,7 +199,9 @@ def test_arcgis_mapservice_query(app, arcgis): params.update({'latmin': '1', 'lonmin': '2', 'latmax': '3', 'lonmax': '4'}) resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 8 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert args['geometry'] == '2.0,1.0,4.0,3.0' assert args['geometryType'] == 'esriGeometryEnvelope' @@ -199,7 +210,10 @@ def test_arcgis_mapservice_query(app, arcgis): # folder params['folder'] = 'foo/bar' resp = app.get(endpoint, params=params, status=200) - assert requests_get.call_args[0][0] 
== 'https://arcgis.example.net/services/foo/bar/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] + == 'https://arcgis.example.net/services/foo/bar/serv/MapServer/1/query' + ) del params['folder'] resp = app.get(endpoint, params=params, status=200) assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/serv/MapServer/1/query' @@ -208,8 +222,7 @@ def test_arcgis_mapservice_query(app, arcgis): resp = app.get(endpoint, params={'service': 'srv'}, status=200) assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/srv/MapServer/0/query' args = requests_get.call_args[1]['params'] - assert args == {'f': 'json', 'inSR': '4326', 'outSR': '4326', - 'outFields': '*', 'where': '1=1'} + assert args == {'f': 'json', 'inSR': '4326', 'outSR': '4326', 'outFields': '*', 'where': '1=1'} # distance resp = app.get(endpoint, params={'service': 'srv', 'distance': '100'}, status=200) @@ -217,8 +230,11 @@ def test_arcgis_mapservice_query(app, arcgis): args = requests_get.call_args[1]['params'] assert args['distance'] == '100' assert args['units'] == 'esriSRUnit_Meter' # default unit - resp = app.get(endpoint, params={'service': 'srv', 'distance': '5', 'units': - 'esriSRUnit_NauticalMile'}, status=200) + resp = app.get( + endpoint, + params={'service': 'srv', 'distance': '5', 'units': 'esriSRUnit_NauticalMile'}, + status=200, + ) assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/srv/MapServer/0/query' args = requests_get.call_args[1]['params'] assert args['distance'] == '5' @@ -226,8 +242,7 @@ def test_arcgis_mapservice_query(app, arcgis): # call errors with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=STATES, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=STATES, status_code=200) resp = app.get(endpoint, params={}, status=400) assert requests_get.call_count == 0 assert resp.json['err'] == 1 @@ -240,19 
+255,25 @@ def test_arcgis_mapservice_query(app, arcgis): assert resp.json['err_class'] == 'passerelle.utils.jsonresponse.APIError' assert resp.json['err_desc'] == ' and must be floats' - resp = app.get(endpoint, params={'service': 'src', 'latmin': '0', 'lonmin': 'y', - 'latmax': '0', 'lonmax': '1'}, status=400) + resp = app.get( + endpoint, + params={'service': 'src', 'latmin': '0', 'lonmin': 'y', 'latmax': '0', 'lonmax': '1'}, + status=400, + ) assert requests_get.call_count == 0 assert resp.json['err'] == 1 assert resp.json['err_class'] == 'passerelle.utils.jsonresponse.APIError' assert resp.json['err_desc'] == ' and must be floats' -@pytest.mark.parametrize('format_string,fail', [ - ('x {é}', True), - ('x {aa.bb}', True), - ('x {a:s} {b:d}', False), -]) +@pytest.mark.parametrize( + 'format_string,fail', + [ + ('x {é}', True), + ('x {aa.bb}', True), + ('x {a:s} {b:d}', False), + ], +) def test_validate_where(format_string, fail): if fail: with pytest.raises(ValidationError): @@ -261,17 +282,28 @@ def test_validate_where(format_string, fail): validate_where(format_string) -@pytest.mark.parametrize('format_string,kwargs,expected', [ - ('adresse LIKE {adresse:s}', {'adresse': "AVENUE D'ANNAM"}, "adresse LIKE 'AVENUE D''ANNAM'"), - ('adresse LIKE {adresse:s} AND population < {pop:d}', { - 'adresse': "AVENUE D'ANNAM", - 'pop': '34', - }, "adresse LIKE 'AVENUE D''ANNAM' AND population < 34"), - ('adresse LIKE {adresse:s} AND population < {pop:d}', { - 'adresse': "AVENUE D'ANNAM", - 'pop': 'x', - }, ValueError), -]) +@pytest.mark.parametrize( + 'format_string,kwargs,expected', + [ + ('adresse LIKE {adresse:s}', {'adresse': "AVENUE D'ANNAM"}, "adresse LIKE 'AVENUE D''ANNAM'"), + ( + 'adresse LIKE {adresse:s} AND population < {pop:d}', + { + 'adresse': "AVENUE D'ANNAM", + 'pop': '34', + }, + "adresse LIKE 'AVENUE D''ANNAM' AND population < 34", + ), + ( + 'adresse LIKE {adresse:s} AND population < {pop:d}', + { + 'adresse': "AVENUE D'ANNAM", + 'pop': 'x', + }, + 
ValueError, + ), + ], +) def test_sql_formatter(format_string, kwargs, expected): formatter = SqlFormatter() if not isinstance(expected, type) or not issubclass(expected, Exception): @@ -293,7 +325,8 @@ def query(arcgis): folder='fold', layer='1', service='serv', - where='adress LIKE {adress:s}') + where='adress LIKE {adress:s}', + ) def test_query_q_method(arcgis, query, rf): @@ -311,27 +344,22 @@ def test_query_q_method(arcgis, query, rf): 'meta': {}, } with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=json.dumps(arcgis_response), - status_code=200) + requests_get.return_value = utils.FakedResponse(content=json.dumps(arcgis_response), status_code=200) assert query.q(rf.get('/', data={'adress': "AVENUE D'ANNAM"}), full=True) == { "data": [ { - "attributes": { - "ident": "1234", - "address": "rue du calvaire", - "codepost": 13200 - }, + "attributes": {"ident": "1234", "address": "rue du calvaire", "codepost": 13200}, "geo": {}, "id": "1234", - "text": "rue du calvaire - 13200" + "text": "rue du calvaire - 13200", } ], - "metadata": { - "meta": {} - } + "metadata": {"meta": {}}, } assert requests_get.call_count == 1 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert args == { 'f': 'json', @@ -347,8 +375,7 @@ def test_q_endpoint(arcgis, query, app): assert endpoint == '/arcgis/test/q/adresses/' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=STATES, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=STATES, status_code=200) resp = app.get(endpoint, params={}, status=403) assert requests_get.call_count == 0 @@ -358,11 +385,14 @@ def test_q_endpoint(arcgis, query, app): # open 
access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(arcgis) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=arcgis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=arcgis.pk + ) resp = app.get(endpoint, params={'adress': "AVENUE D'ANNAM"}, status=200) assert requests_get.call_count == 1 - assert requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + assert ( + requests_get.call_args[0][0] == 'https://arcgis.example.net/services/fold/serv/MapServer/1/query' + ) args = requests_get.call_args[1]['params'] assert args == { 'f': 'json', @@ -384,7 +414,8 @@ def test_tile_endpoint(arcgis, app): assert requests_get.call_args[0][0] == ( 'https://arcgis.example.net/layer1/MapServer/export' '?dpi=96&format=png24&bboxSR=4326&imageSR=3857&transparent=true&size=256,256&f=image' - '&bbox=7.119141,43.612217,7.163086,43.580391') + '&bbox=7.119141,43.612217,7.163086,43.580391' + ) assert resp.content_type == 'image/png' # test layer and folders @@ -396,7 +427,8 @@ def test_tile_endpoint(arcgis, app): assert requests_get.call_args[0][0] == ( 'https://arcgis.example.net/layer1/foo/bar/MapServer/export' '?dpi=96&format=png24&bboxSR=4326&imageSR=3857&transparent=true&size=256,256&f=image' - '&bbox=7.119141,43.612217,7.163086,43.580391') + '&bbox=7.119141,43.612217,7.163086,43.580391' + ) assert resp.content_type == 'image/png' # test missing trailing slash @@ -410,7 +442,8 @@ def test_tile_endpoint(arcgis, app): assert requests_get.call_args[0][0] == ( 'https://arcgis.example.net/layer1/MapServer/export' '?dpi=96&format=png24&bboxSR=4326&imageSR=3857&transparent=true&size=256,256&f=image' - '&bbox=7.119141,43.612217,7.163086,43.580391') + '&bbox=7.119141,43.612217,7.163086,43.580391' + ) assert resp.content_type == 'image/png' @@ -429,9 +462,7 @@ def 
test_arcgis_query_unicity(admin_user, app, arcgis): slug='test-query', ) - arcgis2 = ArcGIS.objects.create( - slug='test2', - base_url='https://arcgis.example.net/') + arcgis2 = ArcGIS.objects.create(slug='test2', base_url='https://arcgis.example.net/') Query.objects.create( resource=arcgis2, name='Foo Bar', diff --git a/tests/test_arcgis_nancy.py b/tests/test_arcgis_nancy.py index 0ce19b31..ac9d0d33 100644 --- a/tests/test_arcgis_nancy.py +++ b/tests/test_arcgis_nancy.py @@ -21,26 +21,24 @@ def get_file_content(filename): class MockedRequestsResponse(mock.Mock): - def json(self): return json_loads(self.content) @pytest.fixture def setup(db): - api = ApiUser.objects.create(username='all', - keytype='', key='') + api = ApiUser.objects.create(username='all', keytype='', key='') arcgis = ArcGIS.objects.create(base_url='https://example.net/layer/0', slug='test') obj_type = ContentType.objects.get_for_model(arcgis) - AccessRight.objects.create(codename='can_access', apiuser=api, - resource_type=obj_type, resource_pk=arcgis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=arcgis.pk + ) return arcgis @pytest.fixture def url(): - return reverse('generic-endpoint', kwargs={ - 'connector': 'arcgis', 'slug': 'test', 'endpoint': 'district'}) + return reverse('generic-endpoint', kwargs={'connector': 'arcgis', 'slug': 'test', 'endpoint': 'district'}) def test_get_district_parameters_error(app, setup, url): @@ -51,8 +49,8 @@ def test_get_district_parameters_error(app, setup, url): @mock.patch('passerelle.utils.Request.get') def test_get_district(mocked_get, app, setup, url): mocked_get.return_value = MockedRequestsResponse( - content=get_file_content('sigresponse.json'), - status_code=200) + content=get_file_content('sigresponse.json'), status_code=200 + ) resp = app.get(url, params={'lon': 6.172122, 'lat': 48.673836}, status=200) data = resp.json['data'] @@ -63,18 +61,17 @@ def test_get_district(mocked_get, app, setup, 
url): @mock.patch('passerelle.utils.Request.get') def test_get_all_district(mocked_get, app, setup, url): mocked_get.return_value = MockedRequestsResponse( - content=get_file_content('all_districts.json'), - status_code=200) + content=get_file_content('all_districts.json'), status_code=200 + ) resp = app.get(url, status=200) data = resp.json['data'] assert len(data) == 7 + @mock.patch('passerelle.utils.Request.get') def test_no_district(mocked_get, app, setup, url): - mocked_get.return_value = MockedRequestsResponse( - content='{"features": []}', - status_code=200) + mocked_get.return_value = MockedRequestsResponse(content='{"features": []}', status_code=200) resp = app.get(url, status=200) assert resp.json['err'] == 1 diff --git a/tests/test_arpege_ecp.py b/tests/test_arpege_ecp.py index d35fb3fb..7079ba2c 100644 --- a/tests/test_arpege_ecp.py +++ b/tests/test_arpege_ecp.py @@ -60,19 +60,22 @@ FAKE_USER_DEMANDS_RESPONSE = """{ } }""" + @pytest.fixture def connector(db): - resource = ArpegeECP.objects.create(slug='test', - webservice_base_url = 'http://arpege.net', - hawk_auth_id = 'id', hawk_auth_key = 'secret') + resource = ArpegeECP.objects.create( + slug='test', webservice_base_url='http://arpege.net', hawk_auth_id='id', hawk_auth_key='secret' + ) return utils.setup_access_rights(resource) + @mock.patch('passerelle.utils.Request.get') def test_check_status(mocked_get, connector): mocked_get.return_value = utils.FakedResponse(content=FAKE_HELLO_RESPONSE, status_code=200) resp = connector.check_status() assert resp['data'] == u'InteropAPI v1 (c) Arpège 2017' + @mock.patch('passerelle.utils.Request.get') def test_check_status(mocked_get, connector): hello_response = json.loads(FAKE_HELLO_RESPONSE) @@ -104,8 +107,9 @@ def test_get_access_token(connector): token = connector.get_access_token('nameid') assert 'no JSON content' in str(error.value) - with utils.mock_url(response='{"IsSuccess": false, "CodErreur": "Fail", "LibErreur": "Auth FAIL"}', - status_code=200): + 
with utils.mock_url( + response='{"IsSuccess": false, "CodErreur": "Fail", "LibErreur": "Auth FAIL"}', status_code=200 + ): with pytest.raises(APIError) as error: token = connector.get_access_token('nameid') assert str(error.value) == 'Auth FAIL (Fail)' @@ -114,8 +118,15 @@ def test_get_access_token(connector): @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') def test_get_user_forms(mocked_post, mocked_get, app, connector): - endpoint = reverse('generic-endpoint', kwargs={ - 'connector': 'arpege-ecp', 'slug': connector.slug, 'endpoint': 'api', 'rest': 'users/nameid/forms'}) + endpoint = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'arpege-ecp', + 'slug': connector.slug, + 'endpoint': 'api', + 'rest': 'users/nameid/forms', + }, + ) assert endpoint == '/arpege-ecp/test/api/users/nameid/forms' mocked_post.return_value = utils.FakedResponse(content=FAKE_LOGIN_OIDC_RESPONSE, status_code=200) @@ -127,7 +138,10 @@ def test_get_user_forms(mocked_post, mocked_get, app, connector): assert item['status'] == 'Deposee' assert item['title'] == 'Test A' assert item['name'] == 'Test A' - assert item['url'] == 'https://www.espace-citoyens.net/integration01/espace-citoyens/Demande/SuiviDemande/WI' + assert ( + item['url'] + == 'https://www.espace-citoyens.net/integration01/espace-citoyens/Demande/SuiviDemande/WI' + ) assert item['form_receipt_datetime'] == '2018-05-11T09:50:46' assert item['form_receipt_time'] == '09:50:46' assert item['readable'] == True @@ -137,12 +151,21 @@ def test_get_user_forms(mocked_post, mocked_get, app, connector): @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') def test_get_user_forms_failure(mocked_post, mocked_get, app, connector): - endpoint = reverse('generic-endpoint', kwargs={ - 'connector': 'arpege-ecp', 'slug': connector.slug, 'endpoint': 'api', 'rest': 'users/nameid/forms'}) + endpoint = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 
'arpege-ecp', + 'slug': connector.slug, + 'endpoint': 'api', + 'rest': 'users/nameid/forms', + }, + ) mocked_post.return_value = utils.FakedResponse(content=FAKE_LOGIN_OIDC_RESPONSE, status_code=200) - mocked_get.return_value = utils.FakedResponse(content='{"IsSuccess": false, "CodErreur": "Fail", "LibErreur": "Failed to get demands"}', - status_code=200) + mocked_get.return_value = utils.FakedResponse( + content='{"IsSuccess": false, "CodErreur": "Fail", "LibErreur": "Failed to get demands"}', + status_code=200, + ) resp = app.get(endpoint) result = resp.json assert result['err'] == 1 @@ -150,8 +173,15 @@ def test_get_user_forms_failure(mocked_post, mocked_get, app, connector): def test_get_user_forms_failure_404(app, connector): - endpoint = reverse('generic-endpoint', kwargs={ - 'connector': 'arpege-ecp', 'slug': connector.slug, 'endpoint': 'api', 'rest': 'users/nameid/forms'}) + endpoint = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'arpege-ecp', + 'slug': connector.slug, + 'endpoint': 'api', + 'rest': 'users/nameid/forms', + }, + ) with utils.mock_url(url='/LoginParSubOIDC', response=FAKE_LOGIN_OIDC_RESPONSE): with utils.mock_url(url='/DemandesUsager', status_code=404): @@ -165,8 +195,15 @@ def test_get_user_forms_failure_404(app, connector): @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') def test_get_user_forms_failure_no_json(mocked_post, mocked_get, app, connector): - endpoint = reverse('generic-endpoint', kwargs={ - 'connector': 'arpege-ecp', 'slug': connector.slug, 'endpoint': 'api', 'rest': 'users/nameid/forms'}) + endpoint = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'arpege-ecp', + 'slug': connector.slug, + 'endpoint': 'api', + 'rest': 'users/nameid/forms', + }, + ) mocked_post.return_value = utils.FakedResponse(content=FAKE_LOGIN_OIDC_RESPONSE, status_code=200) mocked_get.return_value = utils.FakedResponse(content='content', status_code=200) @@ -179,15 +216,21 @@ def 
test_get_user_forms_failure_no_json(mocked_post, mocked_get, app, connector) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') def test_get_user_forms_failure_no_json(mocked_post, mocked_get, app, connector): - endpoint = reverse('generic-endpoint', kwargs={ - 'connector': 'arpege-ecp', 'slug': connector.slug, 'endpoint': 'api', 'rest': 'users/nameid/forms'}) + endpoint = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'arpege-ecp', + 'slug': connector.slug, + 'endpoint': 'api', + 'rest': 'users/nameid/forms', + }, + ) demands_response = json.loads(FAKE_USER_DEMANDS_RESPONSE) demands_response['Data']['results'][0]['data_administratives']['date_depot'] = None mocked_post.return_value = utils.FakedResponse(content=FAKE_LOGIN_OIDC_RESPONSE, status_code=200) - mocked_get.return_value = utils.FakedResponse(content=json.dumps(demands_response), - status_code=200) + mocked_get.return_value = utils.FakedResponse(content=json.dumps(demands_response), status_code=200) resp = app.get(endpoint) result = resp.json assert result['err'] == 1 @@ -197,8 +240,15 @@ def test_get_user_forms_failure_no_json(mocked_post, mocked_get, app, connector) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') def test_get_user_forms_failure_no_token(mocked_post, mocked_get, app, connector): - endpoint = reverse('generic-endpoint', kwargs={ - 'connector': 'arpege-ecp', 'slug': connector.slug, 'endpoint': 'api', 'rest': 'users/nameid/forms'}) + endpoint = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'arpege-ecp', + 'slug': connector.slug, + 'endpoint': 'api', + 'rest': 'users/nameid/forms', + }, + ) token_response = json.loads(FAKE_LOGIN_OIDC_RESPONSE) del token_response['Data']['AccessToken'] @@ -213,8 +263,15 @@ def test_get_user_forms_failure_no_token(mocked_post, mocked_get, app, connector @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') def 
test_get_user_forms_failure_wrong_token(mocked_post, mocked_get, app, connector): - endpoint = reverse('generic-endpoint', kwargs={ - 'connector': 'arpege-ecp', 'slug': connector.slug, 'endpoint': 'api', 'rest': 'users/nameid/forms'}) + endpoint = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'arpege-ecp', + 'slug': connector.slug, + 'endpoint': 'api', + 'rest': 'users/nameid/forms', + }, + ) token_response = json.loads(FAKE_LOGIN_OIDC_RESPONSE) token_response['Data']['AccessToken'] = None diff --git a/tests/test_astregs.py b/tests/test_astregs.py index d58dfc8c..e6424b10 100644 --- a/tests/test_astregs.py +++ b/tests/test_astregs.py @@ -18,83 +18,97 @@ import utils BASE_URL = 'https://test-ws-astre-gs.departement06.fr/axis2/services/' + def get_xml_file(filename): filepath = os.path.join(os.path.dirname(__file__), 'data', 'astregs', filename) with open(filepath, 'rb') as f: return f.read() + def contact_search_side_effect(wsdl_url, **kwargs): if 'Tiers' in wsdl_url: response_content = get_xml_file('Tiers.xml') else: response_content = get_xml_file('Contact.xml') - return mock.Mock(content=response_content, status_code=200, - headers={'Content-Type': 'text/xml'}) + return mock.Mock(content=response_content, status_code=200, headers={'Content-Type': 'text/xml'}) + def search_wsdl_side_effect(wsdl_url, **kwargs): if 'Tiers' in wsdl_url: response_content = get_xml_file('Tiers.wsdl') else: response_content = get_xml_file('Contact.wsdl') - return mock.Mock(content=response_content, status_code=200, - headers={'Content-Type': 'text/xml'}) + return mock.Mock(content=response_content, status_code=200, headers={'Content-Type': 'text/xml'}) + def contact_wsdl_side_effect(wsdl_url, **kwargs): if 'ContactAdresses' in wsdl_url: response_content = get_xml_file('ContactAdresses.wsdl') else: response_content = get_xml_file('Contact.wsdl') - return mock.Mock(content=response_content, status_code=200, - headers={'Content-Type': 'text/xml'}) + return 
mock.Mock(content=response_content, status_code=200, headers={'Content-Type': 'text/xml'}) + def contact_side_effect(wsdl_url, **kwargs): if 'ContactAdresses' in wsdl_url: response_content = get_xml_file('ContactAddressCreationResponse.xml') else: response_content = get_xml_file('ContactCreationResponse.xml') - return mock.Mock(content=response_content, status_code=200, - headers={'Content-Type': 'text/xml'}) + return mock.Mock(content=response_content, status_code=200, headers={'Content-Type': 'text/xml'}) + @pytest.fixture def connector(db): - return utils.make_resource(AstreGS, - title='Test', slug='test', - description='test', wsdl_base_url=BASE_URL, - username='CS-FORML', password='secret', - organism='CG06', budget='01', - exercice='2019' + return utils.make_resource( + AstreGS, + title='Test', + slug='test', + description='test', + wsdl_base_url=BASE_URL, + username='CS-FORML', + password='secret', + organism='CG06', + budget='01', + exercice='2019', ) + @pytest.fixture def recherche_tiers_details_wsdl(): return get_xml_file('RechercheTiersDetails.wsdl') + @pytest.fixture def recherche_tiers_details_result(): return get_xml_file('RechercheTiersDetails.xml') + @pytest.fixture def recherche_tiers_details_empty_result(): content = get_xml_file('RechercheTiersDetails.xml') - return force_bytes(re.sub('.*', '', - force_str(content))) + return force_bytes(re.sub('.*', '', force_str(content))) + @pytest.fixture def tiers_creation_response(): return get_xml_file('TiersCreationResponse.xml') + @pytest.fixture def tiers_with_no_siret_creation_response(): return get_xml_file('TiersWithNoSiretCreationResponse.xml') + @pytest.fixture def tiers_creation_error_response(): return get_xml_file('TiersCreationErrorResponse.xml') + @pytest.fixture def tiers_response(): return get_xml_file('TiersResponse.xml') + @pytest.fixture def tiers_error_response(): return get_xml_file('TiersErrorResponse.xml') @@ -102,11 +116,13 @@ def tiers_error_response(): 
@mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_search_association_by_siren(mocked_post, mocked_get, recherche_tiers_details_wsdl, - recherche_tiers_details_result, connector, app): +def test_search_association_by_siren( + mocked_post, mocked_get, recherche_tiers_details_wsdl, recherche_tiers_details_result, connector, app +): mocked_get.return_value = mock.Mock(content=recherche_tiers_details_wsdl) - mocked_post.return_value = mock.Mock(content=recherche_tiers_details_result, status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=recherche_tiers_details_result, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/associations', params={'siren': '500433909'}) assert mocked_get.call_args[0][0] == '%sRechercheTiersDetails?wsdl' % BASE_URL assert mocked_post.call_args[0][0] == '%sRechercheTiersDetails/' % BASE_URL @@ -124,13 +140,15 @@ def test_check_association_presence(mocked_post, mocked_get, connector, app): wsdl_content = get_xml_file('RechercheTiers.wsdl') response_content = get_xml_file('RechercheTiers.xml') mocked_get.return_value = mock.Mock(content=wsdl_content) - mocked_post.return_value = mock.Mock(content=response_content, status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=response_content, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/check-association-by-siret', params={'siret': '50043390900014'}) assert resp.json['exists'] == True response_content = get_xml_file('RechercheTiersNoResult.xml') - mocked_post.return_value = mock.Mock(content=response_content, status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=response_content, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/check-association-by-siret', params={'siret': 'unknown'}) 
assert resp.json['exists'] == False @@ -141,32 +159,45 @@ def test_association_linking_means(mocked_post, mocked_get, client, connector, a resp = app.get('/astregs/test/get-association-link-means', params={'association_id': '42'}) assert resp.json['already_paired'] == False assert resp.json['data'] == [ - {'id': 'email', 'text': 'par courriel vers fo***@***com', - 'value': 'foo@example.com', 'type': 'email'}, - {'id': 'mobile', 'text': 'par SMS vers 06*****990', - 'value': '0667788990', 'type': 'mobile'} + { + 'id': 'email', + 'text': 'par courriel vers fo***@***com', + 'value': 'foo@example.com', + 'type': 'email', + }, + {'id': 'mobile', 'text': 'par SMS vers 06*****990', 'value': '0667788990', 'type': 'mobile'}, ] assert resp.json['raw_data']['AdresseMail'] == 'foo@example.com' assert resp.json['raw_data']['TelephoneMobile'] == '06 67 78 89 90' Link.objects.create(name_id='user_name_id', association_id='42', resource=connector) - resp = app.get('/astregs/test/get-association-link-means', - params={'association_id': '42', 'NameID': 'user_name_id'}) + resp = app.get( + '/astregs/test/get-association-link-means', params={'association_id': '42', 'NameID': 'user_name_id'} + ) assert resp.json['already_paired'] == True for bad_mobile_number in ('', '01 43 35 01 35', '00 33 7 01 02 03 04', 'letters', '06 01 02'): mocked_post.side_effect = [ - mock.Mock(content=get_xml_file('Tiers.xml'), status_code=200, - headers={'Content-Type': 'text/xml'}), - mock.Mock(content=get_xml_file('Contact.xml').replace( - b'06 67 78 89 90<', - b'%s<' % force_bytes(bad_mobile_number)), - status_code=200, headers={'Content-Type': 'text/xml'}) + mock.Mock( + content=get_xml_file('Tiers.xml'), status_code=200, headers={'Content-Type': 'text/xml'} + ), + mock.Mock( + content=get_xml_file('Contact.xml').replace( + b'06 67 78 89 90<', + b'%s<' % force_bytes(bad_mobile_number), + ), + status_code=200, + headers={'Content-Type': 'text/xml'}, + ), ] resp = 
app.get('/astregs/test/get-association-link-means', params={'association_id': '42'}) assert resp.json['data'] == [ - {'id': 'email', 'text': 'par courriel vers fo***@***com', - 'value': 'foo@example.com', 'type': 'email'} + { + 'id': 'email', + 'text': 'par courriel vers fo***@***com', + 'value': 'foo@example.com', + 'type': 'email', + } ] assert resp.json['raw_data']['AdresseMail'] == 'foo@example.com' assert resp.json['raw_data']['TelephoneMobile'] == (bad_mobile_number or None) @@ -174,16 +205,24 @@ def test_association_linking_means(mocked_post, mocked_get, client, connector, a @mock.patch('passerelle.utils.Request.get', side_effect=search_wsdl_side_effect) @mock.patch('passerelle.utils.Request.post', side_effect=contact_search_side_effect) -def test_link_user_to_association(mocked_post, mocked_get, client, recherche_tiers_details_wsdl, - recherche_tiers_details_result, connector, app): +def test_link_user_to_association( + mocked_post, + mocked_get, + client, + recherche_tiers_details_wsdl, + recherche_tiers_details_result, + connector, + app, +): assert Link.objects.count() == 0 resp = app.get('/astregs/test/get-association-link-means', params={'association_id': '42'}) assert len(resp.json['data']) == 2 mocked_get.side_effect = None mocked_post.side_effect = None mocked_get.return_value = mock.Mock(content=recherche_tiers_details_wsdl) - mocked_post.return_value = mock.Mock(content=recherche_tiers_details_result, status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=recherche_tiers_details_result, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/link', params={'association_id': '42', 'NameID': 'user_name_id'}) assert Link.objects.filter(name_id='user_name_id', association_id='42').count() == 1 link = Link.objects.get(name_id='user_name_id', association_id='42') @@ -197,28 +236,40 @@ def test_link_user_to_association(mocked_post, mocked_get, client, recherche_tie 
@mock.patch('passerelle.utils.Request.get', side_effect=search_wsdl_side_effect) @mock.patch('passerelle.utils.Request.post', side_effect=contact_search_side_effect) -def test_unlink_user_from_association(mocked_post, mocked_get, connector, recherche_tiers_details_wsdl, - recherche_tiers_details_result, app): +def test_unlink_user_from_association( + mocked_post, mocked_get, connector, recherche_tiers_details_wsdl, recherche_tiers_details_result, app +): resp = app.get('/astregs/test/get-association-link-means', params={'association_id': '42'}) mocked_get.side_effect = None mocked_post.side_effect = None mocked_get.return_value = mock.Mock(content=recherche_tiers_details_wsdl) - mocked_post.return_value = mock.Mock(content=recherche_tiers_details_result, status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=recherche_tiers_details_result, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/link', params={'association_id': '42', 'NameID': 'user_name_id'}) resp = app.get('/astregs/test/unlink', params={'NameID': 'user_name_id', 'association_id': '42'}) assert resp.json['deleted'] - resp = app.get('/astregs/test/unlink', params={'NameID': 'user_name_id', 'association_id': '42'}, status=404) + resp = app.get( + '/astregs/test/unlink', params={'NameID': 'user_name_id', 'association_id': '42'}, status=404 + ) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_list_user_associations(mocked_post, mocked_get, recherche_tiers_details_wsdl, - recherche_tiers_details_result, recherche_tiers_details_empty_result, - connector, app, caplog): +def test_list_user_associations( + mocked_post, + mocked_get, + recherche_tiers_details_wsdl, + recherche_tiers_details_result, + recherche_tiers_details_empty_result, + connector, + app, + caplog, +): mocked_get.return_value = mock.Mock(content=recherche_tiers_details_wsdl) - mocked_post.return_value = 
mock.Mock(content=recherche_tiers_details_result, status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=recherche_tiers_details_result, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/link', params={'association_id': '42', 'NameID': 'user_name_id'}) resp = app.get('/astregs/test/links', params={'NameID': 'user_name_id'}) mocked_get.assert_called() @@ -227,8 +278,9 @@ def test_list_user_associations(mocked_post, mocked_get, recherche_tiers_details assert resp.json['data'][0]['id'] == '42' assert resp.json['data'][0]['text'] == '50043390900016 - ASSOCIATION OMNISPORTS DES MONTS D AZUR' - mocked_post.return_value = mock.Mock(content=recherche_tiers_details_empty_result, status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=recherche_tiers_details_empty_result, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/links', params={'NameID': 'user_name_id'}) assert resp.json['data'] assert resp.json['data'][0]['id'] == '42' @@ -240,8 +292,9 @@ def test_list_user_associations(mocked_post, mocked_get, recherche_tiers_details @mock.patch('passerelle.utils.Request.get', side_effect=search_wsdl_side_effect) @mock.patch('passerelle.utils.Request.post') -def test_association_creation(mocked_post, mocked_get, tiers_creation_response, - tiers_creation_error_response, connector, app): +def test_association_creation( + mocked_post, mocked_get, tiers_creation_response, tiers_creation_error_response, connector, app +): payload = { 'CodeFamille': '51', 'CatTiers': '42', @@ -258,10 +311,11 @@ def test_association_creation(mocked_post, mocked_get, tiers_creation_response, 'Sigle': 'EO', 'Financier': 'true', 'AdresseIsAdresseDeFacturation': 'false', - 'AdresseIsAdresseDeCommande': 'false' + 'AdresseIsAdresseDeCommande': 'false', } - mocked_post.return_value = mock.Mock(content=tiers_creation_response, 
status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=tiers_creation_response, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.post_json('/astregs/test/create-association', params=payload) assert resp.json['data'] data = resp.json['data'] @@ -281,9 +335,9 @@ def test_association_creation(mocked_post, mocked_get, tiers_creation_response, assert data['AdresseIsAdresseDeCommande'] == 'false' assert data['AdresseIsAdresseDeFacturation'] == 'false' - mocked_post.return_value = mock.Mock(content=tiers_creation_error_response, - status_code=500, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=tiers_creation_error_response, status_code=500, headers={'Content-Type': 'text/xml'} + ) resp = app.post_json('/astregs/test/create-association', params=payload) assert resp.json['err'] == 1 assert resp.json['err_class'] == 'passerelle.utils.jsonresponse.APIError' @@ -291,11 +345,16 @@ def test_association_creation(mocked_post, mocked_get, tiers_creation_response, assert not resp.json['data'] - @mock.patch('passerelle.utils.Request.get', side_effect=search_wsdl_side_effect) @mock.patch('passerelle.utils.Request.post') -def test_tiers_creation(mocked_post, mocked_get, tiers_with_no_siret_creation_response, - tiers_creation_error_response, connector, app): +def test_tiers_creation( + mocked_post, + mocked_get, + tiers_with_no_siret_creation_response, + tiers_creation_error_response, + connector, + app, +): payload = { 'CodeFamille': '51', 'CatTiers': '42', @@ -313,9 +372,9 @@ def test_tiers_creation(mocked_post, mocked_get, tiers_with_no_siret_creation_re 'AdresseIsAdresseDeFacturation': 'false', 'AdresseIsAdresseDeCommande': 'false', } - mocked_post.return_value = mock.Mock(content=tiers_with_no_siret_creation_response, - status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=tiers_with_no_siret_creation_response, 
status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.post_json('/astregs/test/create-association', params=payload) assert resp.json['data'] data = resp.json['data'] @@ -329,10 +388,10 @@ def test_tiers_creation(mocked_post, mocked_get, tiers_with_no_siret_creation_re @mock.patch('passerelle.utils.Request.get', side_effect=search_wsdl_side_effect) @mock.patch('passerelle.utils.Request.post') -def test_get_association_by_id(mocked_post, mocked_get, tiers_response, tiers_error_response, - connector, app): - mocked_post.return_value = mock.Mock(content=tiers_response, status_code=200, - headers={'Content-Type': 'text/xml'}) +def test_get_association_by_id(mocked_post, mocked_get, tiers_response, tiers_error_response, connector, app): + mocked_post.return_value = mock.Mock( + content=tiers_response, status_code=200, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/get-association-by-id', params={'association_id': '487464'}) assert resp.json['data'] @@ -346,17 +405,20 @@ def test_get_association_by_id(mocked_post, mocked_get, tiers_response, tiers_er data['NumeroSiretFin'] == '00024' data['StatutTiers'] == 'PROPOSE' - resp = app.get('/astregs/test/get-association-by-id', - params={'association_id': '487464', - 'NameID': 'user_name_id'}, - status=404) + resp = app.get( + '/astregs/test/get-association-by-id', + params={'association_id': '487464', 'NameID': 'user_name_id'}, + status=404, + ) Link.objects.create(name_id='user_name_id', association_id='487464', resource=connector) - resp = app.get('/astregs/test/get-association-by-id', params={'association_id': '487464', - 'NameID': 'user_name_id'}) + resp = app.get( + '/astregs/test/get-association-by-id', params={'association_id': '487464', 'NameID': 'user_name_id'} + ) assert resp.json['data'] - mocked_post.return_value = mock.Mock(content=tiers_error_response, status_code=500, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + 
content=tiers_error_response, status_code=500, headers={'Content-Type': 'text/xml'} + ) resp = app.get('/astregs/test/get-association-by-id', params={'association_id': 'unknown'}, status=200) assert resp.json['err'] == 1 assert resp.json['err_class'] == 'passerelle.utils.jsonresponse.APIError' @@ -368,10 +430,10 @@ def test_get_association_by_id(mocked_post, mocked_get, tiers_response, tiers_er @mock.patch('passerelle.utils.Request.post') def test_get_contact_details(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('Contact.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('ContactResponse.xml'), status_code=200, - headers={'Content-Type': 'text/xml'}) - resp = app.get('/astregs/test/get-contact', - params={'contact_id': '1111'}) + mocked_post.return_value = mock.Mock( + content=get_xml_file('ContactResponse.xml'), status_code=200, headers={'Content-Type': 'text/xml'} + ) + resp = app.get('/astregs/test/get-contact', params={'contact_id': '1111'}) assert resp.json['err'] == 0 assert resp.json['data'] data = resp.json['data'] @@ -384,10 +446,10 @@ def test_get_contact_details(mocked_post, mocked_get, connector, app): assert data['Ville'] == 'DRAP' assert data['LibellePays'] == 'France' - mocked_post.return_value = mock.Mock(content=get_xml_file('ContactResponse.xml'), status_code=500, - headers={'Content-Type': 'text/xml'}) - resp = app.get('/astregs/test/get-contact', - params={'contact_id': '4242'}) + mocked_post.return_value = mock.Mock( + content=get_xml_file('ContactResponse.xml'), status_code=500, headers={'Content-Type': 'text/xml'} + ) + resp = app.get('/astregs/test/get-contact', params={'contact_id': '4242'}) assert not resp.json['data'] assert resp.json['err'] == 1 assert resp.json['err_class'] == 'passerelle.utils.jsonresponse.APIError' @@ -395,8 +457,9 @@ def test_get_contact_details(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.get', 
side_effect=ConnectionError('mocked error', request=Request())) @mock.patch('passerelle.utils.Request.post', side_effect=NotImplementedError) -def test_low_level_connection_error(mocked_post, mocked_get, tiers_response, tiers_error_response, - connector, app): +def test_low_level_connection_error( + mocked_post, mocked_get, tiers_response, tiers_error_response, connector, app +): resp = app.get('/astregs/test/get-association-by-id', params={'association_id': '487464'}) assert resp.json['err'] == 1 assert resp.json['data'] is None @@ -421,7 +484,7 @@ def test_create_association_contact(mocked_post, mocked_get, connector, app): 'PageWeb': 'http://example.com', 'CodePostal': '06000', 'CodeFonction': '01', - 'EncodeKeyStatut': 'VALIDE' + 'EncodeKeyStatut': 'VALIDE', } resp = app.post_json('/astregs/test/create-contact', params=payload) assert resp.json['data'] @@ -444,9 +507,11 @@ def test_create_association_contact(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_delete_association_contact(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('Contact.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('ContactDeletionResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) + mocked_post.return_value = mock.Mock( + content=get_xml_file('ContactDeletionResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) resp = app.get('/astregs/test/delete-contact', params={'contact_id': '437307'}) assert resp.json['data'] # if contact is deleted the its id is None @@ -457,9 +522,11 @@ def test_delete_association_contact(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_create_document(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('DocumentAnnexe.wsdl')) - mocked_post.return_value = 
mock.Mock(content=get_xml_file('DocumentCreationResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) + mocked_post.return_value = mock.Mock( + content=get_xml_file('DocumentCreationResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) payload = { 'Sujet': 'Test', 'Entite': 'COMMANDE', @@ -473,8 +540,8 @@ def test_create_document(mocked_post, mocked_get, connector, app): 'document': { 'filename': 'test.pdf', 'content_type': 'application/pdf', - 'content': 'base64encodedcontent' - } + 'content': 'base64encodedcontent', + }, } resp = app.post_json('/astregs/test/create-document', params=payload) assert resp.json['data'] @@ -486,9 +553,11 @@ def test_create_document(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_create_grant_demand(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('Dossier.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('DossierCreationResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) + mocked_post.return_value = mock.Mock( + content=get_xml_file('DossierCreationResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) payload = { 'Libelle': 'test grant demand', 'LibelleCourt': 'test', @@ -503,7 +572,7 @@ def test_create_grant_demand(mocked_post, mocked_get, connector, app): resp = app.post_json('/astregs/test/create-grant-demand', params=payload) assert resp.json['data'] data = resp.json['data'] - assert data['CodeDossier'] =='2019_06407' + assert data['CodeDossier'] == '2019_06407' assert data['Exercice'] == '2019' assert data['Libelle'] == 'test grant demand' assert data['LibelleCourt'] == 'test' @@ -525,15 +594,12 @@ def test_create_grant_demand(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_create_indana_indicator(mocked_post, mocked_get, connector, app): mocked_get.return_value = 
mock.Mock(content=get_xml_file('DossierIndicateur.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('DossierIndicateurCreationResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) - payload = { - 'CodeDossier': '2019_06407', - 'CodeInd_1': '501', - 'AnneeInd_1': '2019', - 'ValInd_1': 'O' - } + mocked_post.return_value = mock.Mock( + content=get_xml_file('DossierIndicateurCreationResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) + payload = {'CodeDossier': '2019_06407', 'CodeInd_1': '501', 'AnneeInd_1': '2019', 'ValInd_1': 'O'} resp = app.post_json('/astregs/test/create-indana-indicator', params=payload) assert resp.json['err'] == 0 assert resp.json['data'] @@ -549,25 +615,22 @@ def test_create_indana_indicator(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_update_indana_indicator(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('DossierIndicateur.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('DossierIndicateurModificationResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) - payload = { - 'CodeDossier': '2019_06407', - 'CodeInd_1': '501', - 'AnneeInd_1': '2019', - 'ValInd_1': 'N' - } + mocked_post.return_value = mock.Mock( + content=get_xml_file('DossierIndicateurModificationResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) + payload = {'CodeDossier': '2019_06407', 'CodeInd_1': '501', 'AnneeInd_1': '2019', 'ValInd_1': 'N'} resp = app.post_json('/astregs/test/update-indana-indicator', params=payload) assert resp.json == { 'err': 0, 'data': { - 'CodeDossier':'2019_06407', + 'CodeDossier': '2019_06407', 'CodeInd_1': '501', 'AnneeInd_1': '2019', 'ValInd_1': 'N', - 'IndAide': 'Non' - } + 'IndAide': 'Non', + }, } @@ -575,24 +638,22 @@ def test_update_indana_indicator(mocked_post, mocked_get, connector, app): 
@mock.patch('passerelle.utils.Request.post') def test_delete_indana_indicator(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('DossierIndicateur.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('DossierIndicateurSuppressionResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) - payload = { - 'CodeDossier': '2019_06407', - 'CodeInd_1': '501', - 'AnneeInd_1': '2019' - } + mocked_post.return_value = mock.Mock( + content=get_xml_file('DossierIndicateurSuppressionResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) + payload = {'CodeDossier': '2019_06407', 'CodeInd_1': '501', 'AnneeInd_1': '2019'} resp = app.post_json('/astregs/test/delete-indana-indicator', params=payload) assert resp.json == { 'err': 0, 'data': { - 'CodeDossier':'2019_06407', + 'CodeDossier': '2019_06407', 'CodeInd_1': '501', 'AnneeInd_1': '2019', 'ValInd_1': 'N', - 'IndAide': 'Non' - } + 'IndAide': 'Non', + }, } @@ -600,9 +661,11 @@ def test_delete_indana_indicator(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_create_tiers_rib(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('TiersRib.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('TiersRibCreationResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) + mocked_post.return_value = mock.Mock( + content=get_xml_file('TiersRibCreationResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) payload = { 'CleIban': '76', 'CodeBic': 'CODEBIC', @@ -615,7 +678,7 @@ def test_create_tiers_rib(mocked_post, mocked_get, connector, app): 'LibelleCourt': 'Test', 'LibellePays': 'FR', 'NumeroIban': 'FR76AABBCCDDEEFFGGHHIIJJKKLLM', - 'CodeDomiciliation': '1' + 'CodeDomiciliation': '1', } resp = app.post_json('/astregs/test/create-tiers-rib', params=payload) @@ -636,13 +699,12 @@ def 
test_create_tiers_rib(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_get_tiers_rib(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('TiersRib.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('TiersRibChargementResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) - params = { - 'CodeTiers': '487464', - 'IdRib': '621407' - } + mocked_post.return_value = mock.Mock( + content=get_xml_file('TiersRibChargementResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) + params = {'CodeTiers': '487464', 'IdRib': '621407'} resp = app.get('/astregs/test/get-tiers-rib', params=params) assert resp.json['err'] == 0 @@ -661,9 +723,11 @@ def test_get_tiers_rib(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_update_tiers_rib(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('TiersRib.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('TiersRibModificationResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) + mocked_post.return_value = mock.Mock( + content=get_xml_file('TiersRibModificationResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) payload = { 'CleIban': '76', 'CodeBic': 'CODEBIC', @@ -676,13 +740,12 @@ def test_update_tiers_rib(mocked_post, mocked_get, connector, app): 'LibelleCompteEtranger': 'LABEL', 'LibelleCourt': 'New Test', 'LibellePays': 'FR', - 'NumeroIban': 'FR76AABBCCDDEEFFGGHHIIJJKKLLM' + 'NumeroIban': 'FR76AABBCCDDEEFFGGHHIIJJKKLLM', } qs = urlencode({'CodeTiers': '487464', 'IdRib': '621412'}) - resp = app.post_json('/astregs/test/update-tiers-rib?%s' % qs, - params=payload) + resp = app.post_json('/astregs/test/update-tiers-rib?%s' % qs, params=payload) assert resp.json['err'] == 0 assert resp.json['data'] data = resp.json['data'] @@ -694,14 
+757,13 @@ def test_update_tiers_rib(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_delete_tiers_rib(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('TiersRib.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('TiersRibSuppressionResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) + mocked_post.return_value = mock.Mock( + content=get_xml_file('TiersRibSuppressionResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) - params = { - 'CodeTiers': '487464', - 'IdRib': '621407' - } + params = {'CodeTiers': '487464', 'IdRib': '621407'} resp = app.get('/astregs/test/delete-tiers-rib', params=params) assert resp.json['err'] == 0 @@ -718,15 +780,12 @@ def test_delete_tiers_rib(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_search_tiers_by_rib(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('RechercheTiers.wsdl')) - mocked_post.return_value = mock.Mock(content=get_xml_file('RechercheTiersByRibResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) - params ={ - 'banque': '30001', - 'guichet': '00794', - 'numero_compte': '12345678901', - 'cle': '85' - } + mocked_post.return_value = mock.Mock( + content=get_xml_file('RechercheTiersByRibResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) + params = {'banque': '30001', 'guichet': '00794', 'numero_compte': '12345678901', 'cle': '85'} resp = app.get('/astregs/test/find-tiers-by-rib', params=params) assert resp.json['err'] == 0 @@ -741,15 +800,12 @@ def test_search_tiers_by_rib(mocked_post, mocked_get, connector, app): @mock.patch('passerelle.utils.Request.post') def test_search_tiers_by_rib_no_result(mocked_post, mocked_get, connector, app): mocked_get.return_value = mock.Mock(content=get_xml_file('RechercheTiers.wsdl')) - 
mocked_post.return_value = mock.Mock(content=get_xml_file('RechercheTiersByRibEmptyResponse.xml'), - headers={'Content-Type': 'text/xml'}, - status_code=200) - params ={ - 'banque': '30001', - 'guichet': '00794', - 'numero_compte': '12345678901', - 'cle': '85' - } + mocked_post.return_value = mock.Mock( + content=get_xml_file('RechercheTiersByRibEmptyResponse.xml'), + headers={'Content-Type': 'text/xml'}, + status_code=200, + ) + params = {'banque': '30001', 'guichet': '00794', 'numero_compte': '12345678901', 'cle': '85'} resp = app.get('/astregs/test/find-tiers-by-rib', params=params) assert resp.json['err'] == 0 diff --git a/tests/test_atal.py b/tests/test_atal.py index 756e13b6..03c93456 100644 --- a/tests/test_atal.py +++ b/tests/test_atal.py @@ -14,23 +14,18 @@ from passerelle.base.models import ApiUser, AccessRight def get_file(filename): - with open( - os.path.join( - os.path.dirname(__file__), 'data', 'atal', filename - ), - 'rb' - ) as f: + with open(os.path.join(os.path.dirname(__file__), 'data', 'atal', filename), 'rb') as f: return f.read() @pytest.fixture() def connector(db): api = ApiUser.objects.create(username='all', keytype='', key='') - connector = ATALConnector.objects.create( - base_soap_url='http://example.atal.com/', slug='slug-atal') + connector = ATALConnector.objects.create(base_soap_url='http://example.atal.com/', slug='slug-atal') obj_type = ContentType.objects.get_for_model(connector) AccessRight.objects.create( - codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=connector.pk) + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=connector.pk + ) return connector @@ -46,15 +41,12 @@ def mock_atal_soap_call(monkeypatch, return_value=None, side_effect=None): class SoapElem(object): - def __init__(self, **kwargs): for attr, value in kwargs.items(): setattr(self, attr, value) -REFS = [ - SoapElem(code='code1', libelle='elem1'), SoapElem(code='code2', libelle='elem2') -] +REFS = 
[SoapElem(code='code1', libelle='elem1'), SoapElem(code='code2', libelle='elem2')] def test_get_thematique(app, connector, monkeypatch): @@ -73,9 +65,7 @@ def test_get_type_activite(app, connector, monkeypatch): response = app.get('/atal/slug-atal/get-type-activite') assert response.json == { 'err': 0, - 'data': [ - {'text': 'elem1', 'id': 'code1'}, - {'text': 'elem2', 'id': 'code2'}] + 'data': [{'text': 'elem1', 'id': 'code1'}, {'text': 'elem2', 'id': 'code2'}], } call_params = mock_soap_call.call_args.kwargs assert call_params['wsdl'] == 'VilleAgileService' @@ -87,9 +77,7 @@ def test_get_type_de_voie(app, connector, monkeypatch): response = app.get('/atal/slug-atal/get-type-de-voie') assert response.json == { 'err': 0, - 'data': [ - {'text': 'elem1', 'id': 'code1'}, - {'text': 'elem2', 'id': 'code2'}] + 'data': [{'text': 'elem1', 'id': 'code1'}, {'text': 'elem2', 'id': 'code2'}], } call_params = mock_soap_call.call_args.kwargs assert call_params['wsdl'] == 'VilleAgileService' @@ -108,9 +96,7 @@ def test_get_types_equipement(app, connector, monkeypatch): response = app.get('/atal/slug-atal/get-types-equipement') assert response.json == { 'err': 0, - 'data': [ - {'text': 'Espaces Verts', 'id': '2'}, - {'text': 'Voirie', 'id': '4'}] + 'data': [{'text': 'Espaces Verts', 'id': '2'}, {'text': 'Voirie', 'id': '4'}], } call_params = mock_soap_call.call_args.kwargs assert call_params['wsdl'] == 'VilleAgileService' @@ -119,15 +105,9 @@ def test_get_types_equipement(app, connector, monkeypatch): def test_insert_action_comment(app, connector, monkeypatch): mock_soap_call = mock_atal_soap_call(monkeypatch, return_value='DIT19050001') - params = { - 'numero_demande': 'DIT19050001', - 'commentaire': 'aaa' - } + params = {'numero_demande': 'DIT19050001', 'commentaire': 'aaa'} response = app.post_json('/atal/slug-atal/insert-action-comment', params=params) - assert response.json == { - 'err': 0, - 'data': {'demande_number': 'DIT19050001'} - } + assert response.json == {'err': 0, 
'data': {'demande_number': 'DIT19050001'}} call_params = mock_soap_call.call_args.kwargs assert call_params['wsdl'] == 'DemandeService' assert call_params['method'] == 'insertActionComment' @@ -137,15 +117,9 @@ def test_insert_action_comment(app, connector, monkeypatch): def test_insert_demande_complet_by_type(app, connector, monkeypatch): mock_soap_call = mock_atal_soap_call(monkeypatch, return_value='DIT19050001') - params = { - 'type_demande': 'VOIRIE', - 'coord_x': 48.866667, 'coord_y': 2.333333 - } + params = {'type_demande': 'VOIRIE', 'coord_x': 48.866667, 'coord_y': 2.333333} response = app.post_json('/atal/slug-atal/insert-demande-complet-by-type', params=params) - assert response.json == { - 'err': 0, - 'data': {'demande_number': 'DIT19050001'} - } + assert response.json == {'err': 0, 'data': {'demande_number': 'DIT19050001'}} call_params = mock_soap_call.call_args.kwargs assert call_params['wsdl'] == 'DemandeService' assert call_params['method'] == 'insertDemandeCompletByType' @@ -156,19 +130,10 @@ def test_insert_demande_complet_by_type(app, connector, monkeypatch): def test_upload(app, connector, monkeypatch): mock_soap_call = mock_atal_soap_call(monkeypatch, return_value=None) - base64_str = 'eyJsYXN0X2NoZWNrIjoiMjAxOS0wNC0xMFQxMjowODoyOVoiL' + \ - 'CJweXBpX3ZlcnNpb24iOiIxOS4wLjMifQ==' - params = { - 'numero_demande': 'DIT19050001', - 'nom_fichier': 'data.json', - 'file': { - 'content': base64_str - } - } + base64_str = 'eyJsYXN0X2NoZWNrIjoiMjAxOS0wNC0xMFQxMjowODoyOVoiL' + 'CJweXBpX3ZlcnNpb24iOiIxOS4wLjMifQ==' + params = {'numero_demande': 'DIT19050001', 'nom_fichier': 'data.json', 'file': {'content': base64_str}} response = app.post_json('/atal/slug-atal/upload', params=params) - assert response.json == { - 'err': 0 - } + assert response.json == {'err': 0} call_params = mock_soap_call.call_args.kwargs assert call_params['wsdl'] == 'ChargementPiecesJointesService' assert call_params['method'] == 'upload' @@ -179,43 +144,34 @@ def test_upload(app, 
connector, monkeypatch): params = { 'numero_demande': 'DIT19050001', 'nom_fichier': 'data.json', - 'file': { - 'content': 'invalidbase64' - } + 'file': {'content': 'invalidbase64'}, } response = app.post_json('/atal/slug-atal/upload', params=params) assert response.json == { 'data': None, 'err': 1, 'err_class': 'passerelle.utils.jsonresponse.APIError', - 'err_desc': 'Invalid base64 string' + 'err_desc': 'Invalid base64 string', } # empty file - params = { - 'numero_demande': 'DIT19050001', - 'nom_fichier': 'data.json', - 'file': {} - } + params = {'numero_demande': 'DIT19050001', 'nom_fichier': 'data.json', 'file': {}} response = app.post_json('/atal/slug-atal/upload', params=params, status=400) assert response.json == { 'data': None, 'err': 1, 'err_class': 'passerelle.utils.jsonresponse.APIError', - 'err_desc': "file: 'content' is a required property" + 'err_desc': "file: 'content' is a required property", } # no file - params = { - 'numero_demande': 'DIT19050001', - 'nom_fichier': 'data.json' - } + params = {'numero_demande': 'DIT19050001', 'nom_fichier': 'data.json'} response = app.post_json('/atal/slug-atal/upload', params=params, status=400) assert response.json == { 'data': None, 'err': 1, 'err_class': 'passerelle.utils.jsonresponse.APIError', - 'err_desc': "'file' is a required property" + 'err_desc': "'file' is a required property", } @@ -223,14 +179,14 @@ def test_retrieve_details_demande(app, connector, monkeypatch): import passerelle.utils wsdl_response = mock.Mock( - content=get_file('DemandeService.wsdl'), status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('DemandeService.wsdl'), status_code=200, headers={'Content-Type': 'text/xml'} ) monkeypatch.setattr(passerelle.utils.Request, 'get', mock.Mock(return_value=wsdl_response)) api_response = mock.Mock( - content=get_file('details_demande_response.xml') % b'EN ATTENTE', status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('details_demande_response.xml') % b'EN 
ATTENTE', + status_code=200, + headers={'Content-Type': 'text/xml'}, ) monkeypatch.setattr(passerelle.utils.Request, 'post', mock.Mock(return_value=api_response)) @@ -243,14 +199,14 @@ def test_retrieve_etat_travaux(app, connector, monkeypatch): import passerelle.utils wsdl_response = mock.Mock( - content=get_file('DemandeService.wsdl'), status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('DemandeService.wsdl'), status_code=200, headers={'Content-Type': 'text/xml'} ) monkeypatch.setattr(passerelle.utils.Request, 'get', mock.Mock(return_value=wsdl_response)) api_response = mock.Mock( - content=get_file('etat_travaux_response.xml') % b'travaux pas commences', status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('etat_travaux_response.xml') % b'travaux pas commences', + status_code=200, + headers={'Content-Type': 'text/xml'}, ) monkeypatch.setattr(passerelle.utils.Request, 'post', mock.Mock(return_value=api_response)) @@ -263,14 +219,14 @@ def test_infos(app, connector, monkeypatch): import passerelle.utils wsdl_response = mock.Mock( - content=get_file('DemandeService.wsdl'), status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('DemandeService.wsdl'), status_code=200, headers={'Content-Type': 'text/xml'} ) monkeypatch.setattr(passerelle.utils.Request, 'get', mock.Mock(return_value=wsdl_response)) api_response = mock.Mock( - content=get_file('details_demande_response.xml') % b'EN ATTENTE', status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('details_demande_response.xml') % b'EN ATTENTE', + status_code=200, + headers={'Content-Type': 'text/xml'}, ) monkeypatch.setattr(passerelle.utils.Request, 'post', mock.Mock(return_value=api_response)) response = app.get('/atal/slug-atal/infos/DIT18050001/') @@ -279,12 +235,14 @@ def test_infos(app, connector, monkeypatch): assert response.json['data']['status'] == 'EN ATTENTE' api_response1 = mock.Mock( - 
content=get_file('details_demande_response.xml') % b'PRISE EN COMPTE', status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('details_demande_response.xml') % b'PRISE EN COMPTE', + status_code=200, + headers={'Content-Type': 'text/xml'}, ) api_response2 = mock.Mock( - content=get_file('etat_travaux_response.xml') % b'travaux pas commences', status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('etat_travaux_response.xml') % b'travaux pas commences', + status_code=200, + headers={'Content-Type': 'text/xml'}, ) monkeypatch.setattr( passerelle.utils.Request, 'post', mock.Mock(side_effect=[api_response1, api_response2]) @@ -296,12 +254,14 @@ def test_infos(app, connector, monkeypatch): # User comments in response api_response1 = mock.Mock( - content=get_file('details_demande_response_with_comments.xml'), status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('details_demande_response_with_comments.xml'), + status_code=200, + headers={'Content-Type': 'text/xml'}, ) api_response2 = mock.Mock( - content=get_file('etat_travaux_response.xml') % b'travaux pas commences', status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('etat_travaux_response.xml') % b'travaux pas commences', + status_code=200, + headers={'Content-Type': 'text/xml'}, ) monkeypatch.setattr( passerelle.utils.Request, 'post', mock.Mock(side_effect=[api_response1, api_response2]) @@ -311,10 +271,7 @@ def test_infos(app, connector, monkeypatch): assert response.json['err'] == 0 data = response.json['data'] assert data['status'] == 'travaux pas commences' - assert data['works_comment'] == { - 'text': 'bonjour atal', - 'date': 'Thursday 24 October 2019, 16:51' - } + assert data['works_comment'] == {'text': 'bonjour atal', 'date': 'Thursday 24 October 2019, 16:51'} assert data['works_comments'] == [] assert data['demand_comment'] is None assert data['works_status'] is None @@ -326,14 +283,8 @@ def test_infos(app, connector, 
monkeypatch): response = app.get('/atal/slug-atal/infos/DIT18050001/?full=true') data = response.json['data'] assert len(data['works_comments']) == 2 - assert data['works_comments'][0] == { - 'text': 'OK', - 'date': 'Thursday 24 October 2019, 16:48' - } - last_comment = { - 'text': 'bonjour atal', - 'date': 'Thursday 24 October 2019, 16:51' - } + assert data['works_comments'][0] == {'text': 'OK', 'date': 'Thursday 24 October 2019, 16:48'} + last_comment = {'text': 'bonjour atal', 'date': 'Thursday 24 October 2019, 16:51'} assert data['works_comments'][1] == last_comment assert data['works_comment'] == last_comment @@ -342,27 +293,24 @@ def test_new_comments(app, connector, monkeypatch): import passerelle.utils wsdl_response = mock.Mock( - content=get_file('DemandeService.wsdl'), status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('DemandeService.wsdl'), status_code=200, headers={'Content-Type': 'text/xml'} ) monkeypatch.setattr(passerelle.utils.Request, 'get', mock.Mock(return_value=wsdl_response)) api_response = mock.Mock( - content=get_file('details_demande_response_with_comments.xml'), status_code=200, - headers={'Content-Type': 'text/xml'} + content=get_file('details_demande_response_with_comments.xml'), + status_code=200, + headers={'Content-Type': 'text/xml'}, ) monkeypatch.setattr(passerelle.utils.Request, 'post', mock.Mock(return_value=api_response)) all_comments = [ + {'text': 'OK', 'date': 'Thursday 24 October 2019, 16:48', 'date_raw': '2019-10-24T16:48:34+02:00'}, { - 'text': 'OK', - 'date': 'Thursday 24 October 2019, 16:48', - 'date_raw': '2019-10-24T16:48:34+02:00' - }, { 'text': 'bonjour atal', 'date': 'Thursday 24 October 2019, 16:51', - 'date_raw': '2019-10-24T16:51:37+02:00' - } + 'date_raw': '2019-10-24T16:51:37+02:00', + }, ] last_datetime = datetime(year=2019, month=10, day=23) diff --git a/tests/test_atos_genesys.py b/tests/test_atos_genesys.py index 6e3eeb77..592ca1f1 100644 --- a/tests/test_atos_genesys.py +++ 
b/tests/test_atos_genesys.py @@ -15,19 +15,15 @@ FAKE_URL = 'https://sirus.fr/' @pytest.fixture def genesys(db): return utils.make_resource( - Resource, - title='Test 1', - slug='test1', - description='Connecteur de test', - webservice_base_url=FAKE_URL) + Resource, title='Test 1', slug='test1', description='Connecteur de test', webservice_base_url=FAKE_URL + ) @pytest.fixture def mock_codifications_ok(): - response = open(os.path.join( - os.path.dirname(__file__), - 'data', - 'genesys_select_codifications.xml')).read() + response = open( + os.path.join(os.path.dirname(__file__), 'data', 'genesys_select_codifications.xml') + ).read() with utils.mock_url(FAKE_URL, response) as mock: yield mock @@ -36,11 +32,15 @@ def test_base_url_normalization(db): # db is necessary because Resource.__init__ set resource.logger which does DB queries :/ resource = Resource(title='t', slug='t', description='t') resource.webservice_base_url = 'http://localhost/WSUsagerPublik/services/PublikService/' - assert (resource.select_usager_by_ref_url - == 'http://localhost/WSUsagerPublik/services/PublikService/selectUsagerByRef') + assert ( + resource.select_usager_by_ref_url + == 'http://localhost/WSUsagerPublik/services/PublikService/selectUsagerByRef' + ) resource.webservice_base_url = 'http://localhost/' - assert (resource.select_usager_by_ref_url - == 'http://localhost/WSUsagerPublik/services/PublikService/selectUsagerByRef') + assert ( + resource.select_usager_by_ref_url + == 'http://localhost/WSUsagerPublik/services/PublikService/selectUsagerByRef' + ) def test_ws_categories(app, genesys, mock_codifications_ok): @@ -62,6 +62,7 @@ def test_ws_codifications(app, genesys, mock_codifications_ok): def test_ws_codifications_failure(app, genesys, mock_500): from django.core.cache import cache + cache.clear() url = utils.generic_endpoint_url('atos-genesys', 'codifications', slug=genesys.slug) @@ -79,12 +80,13 @@ RESPONSE_UNKNOWN_LOGIN = ''' def test_ws_link_unknown_appairage(app, genesys): url 
= utils.generic_endpoint_url('atos-genesys', 'link', slug=genesys.slug) with utils.mock_url(FAKE_URL, RESPONSE_UNKNOWN_LOGIN): - response = app.post(url + '?' + urlencode({ - 'NameID': 'zob', - 'login': '1234', - 'password': 'xyz', - 'email': 'john.doe@example.com' - })) + response = app.post( + url + + '?' + + urlencode( + {'NameID': 'zob', 'login': '1234', 'password': 'xyz', 'email': 'john.doe@example.com'} + ) + ) assert response.json['err'] == 1 assert response.json['data']['code'] == '6' assert response.json['data']['label'] @@ -101,51 +103,66 @@ def test_ws_link_created(app, genesys): url = utils.generic_endpoint_url('atos-genesys', 'link', slug=genesys.slug) assert Link.objects.count() == 0 with utils.mock_url(FAKE_URL, RESPONSE_CREATED): - response = app.post(url + '?' + urlencode({ - 'NameID': 'zob', - 'login': '1234', - 'password': 'xyz', - 'email': 'john.doe@example.com' - })) + response = app.post( + url + + '?' + + urlencode( + {'NameID': 'zob', 'login': '1234', 'password': 'xyz', 'email': 'john.doe@example.com'} + ) + ) link = Link.objects.latest('pk') assert response.json['err'] == 0 assert response.json['link_id'] == link.pk assert response.json['new'] - assert Link.objects.filter( - name_id='zob', id_per='789', resource=genesys).count() == 1 + assert Link.objects.filter(name_id='zob', id_per='789', resource=genesys).count() == 1 with utils.mock_url(FAKE_URL, RESPONSE_SELECT_USAGER): dossiers_url = utils.generic_endpoint_url('atos-genesys', 'dossiers', slug=genesys.slug) - response = app.get(dossiers_url + '?' + urlencode({ - 'NameID': 'zob', - })) + response = app.get( + dossiers_url + + '?' + + urlencode( + { + 'NameID': 'zob', + } + ) + ) url = utils.generic_endpoint_url('atos-genesys', 'unlink', slug=genesys.slug) - response = app.post(url + '?' + urlencode({ - 'NameID': 'zob', - 'link_id': response.json['data'][0]['id'], - })) + response = app.post( + url + + '?' 
+ + urlencode( + { + 'NameID': 'zob', + 'link_id': response.json['data'][0]['id'], + } + ) + ) assert response.json['err'] == 0 assert response.json['deleted'] == 1 assert Link.objects.count() == 0 -RESPONSE_SELECT_USAGER = open(os.path.join( - os.path.dirname(__file__), - 'data', - 'genesys_select_usager.xml')).read() + +RESPONSE_SELECT_USAGER = open( + os.path.join(os.path.dirname(__file__), 'data', 'genesys_select_usager.xml') +).read() def test_ws_dossiers(app, genesys): - link = Link.objects.create( - resource=genesys, - name_id='zob', - id_per='1234') + link = Link.objects.create(resource=genesys, name_id='zob', id_per='1234') url = utils.generic_endpoint_url('atos-genesys', 'dossiers', slug=genesys.slug) with utils.mock_url(FAKE_URL, RESPONSE_SELECT_USAGER): - response = app.get(url + '?' + urlencode({ - 'NameID': 'zob', - })) + response = app.get( + url + + '?' + + urlencode( + { + 'NameID': 'zob', + } + ) + ) assert response.json['err'] == 0 assert response.json['data'] assert len(response.json['data']) == 1 @@ -159,15 +176,18 @@ def test_ws_dossiers(app, genesys): assert len(response.json['data'][0]['dossier']['DROITS']) == 1 assert len(response.json['data'][0]['dossier']['DROITS']['PH']) == 1 - link2 = Link.objects.create( - resource=genesys, - name_id='zob', - id_per='4567') + link2 = Link.objects.create(resource=genesys, name_id='zob', id_per='4567') with utils.mock_url(FAKE_URL, RESPONSE_SELECT_USAGER): - response = app.get(url + '?' + urlencode({ - 'NameID': 'zob', - })) + response = app.get( + url + + '?' + + urlencode( + { + 'NameID': 'zob', + } + ) + ) assert response.json['err'] == 0 assert response.json['data'] assert len(response.json['data']) == 2 @@ -181,10 +201,16 @@ def test_ws_dossiers(app, genesys): assert response.json['data'][1]['text'] == u'%s - John DOE' % link2.id_per with utils.mock_url(FAKE_URL, RESPONSE_SELECT_USAGER): - response = app.get(url + '?' 
+ urlencode({ - 'NameID': 'zob', - 'link_id': link2.id, - })) + response = app.get( + url + + '?' + + urlencode( + { + 'NameID': 'zob', + 'link_id': link2.id, + } + ) + ) assert response.json['err'] == 0 assert response.json['data'] assert response.json['data']['id_per'] == '4567' @@ -198,10 +224,7 @@ def test_row_locked_cache(genesys, freezer): from passerelle.apps.atos_genesys.utils import RowLockedCache freezer.move_to('2018-01-01 00:00:00') - link = Link.objects.create( - resource=genesys, - name_id='zob', - id_per='4567') + link = Link.objects.create(resource=genesys, name_id='zob', id_per='4567') class F(object): calls = 0 @@ -211,6 +234,7 @@ def test_row_locked_cache(genesys, freezer): time.sleep(0.05) self.calls += 1 return self.value + f = F() # Check that cache works, f() is called only one time during the cache duration (60 seconds) @@ -232,6 +256,7 @@ def test_row_locked_cache(genesys, freezer): assert rlc() == 2 assert f.calls == 2 + RESPONSE_SEARCH = ''' John @@ -282,15 +307,24 @@ RESPONSE_SELECT_USAGER_NO_CONTACTS = ''' def test_ws_search(app, genesys): url = utils.generic_endpoint_url('atos-genesys', 'search', slug=genesys.slug) - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH): - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', - RESPONSE_SELECT_USAGER): - response = app.get(url + '?' + urlencode({ - 'first_name': 'John', - 'last_name': 'Doe', - 'date_of_birth': '1925-01-01', - 'commune_naissance': 'NïCe', - })) + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH + ): + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', RESPONSE_SELECT_USAGER + ): + response = app.get( + url + + '?' 
+ + urlencode( + { + 'first_name': 'John', + 'last_name': 'Doe', + 'date_of_birth': '1925-01-01', + 'commune_naissance': 'NïCe', + } + ) + ) assert response.json['err'] == 0 assert response.json['already_paired'] is False assert response.json['link_id'] is None @@ -304,7 +338,8 @@ def test_ws_search(app, genesys): u'nom_naissance': u'TEST', u'phone': u'0655555555', u'prenom': u'John', - u'text': u'par SMS vers 06*****555'}, + u'text': u'par SMS vers 06*****555', + }, { u'id': u'tel2', u'id_per': u'1234', @@ -312,7 +347,7 @@ def test_ws_search(app, genesys): u'nom_naissance': u'TEST', u'phone': u'0644444444', u'prenom': u'John', - u'text': u'par SMS vers 06*****444' + u'text': u'par SMS vers 06*****444', }, { u'email': u'test@sirus.fr', @@ -321,42 +356,69 @@ def test_ws_search(app, genesys): u'nom': u'DOE', u'nom_naissance': u'TEST', u'prenom': u'John', - u'text': u'par courriel vers te***@***.fr' - } + u'text': u'par courriel vers te***@***.fr', + }, ] - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH): - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', - RESPONSE_SELECT_USAGER): - response = app.get(url + '?' + urlencode({ - 'first_name': 'John', - 'last_name': 'Doe', - 'date_of_birth': '1925-01-01', - 'commune_naissance': 'Cassis', - })) + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH + ): + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', RESPONSE_SELECT_USAGER + ): + response = app.get( + url + + '?' 
+ + urlencode( + { + 'first_name': 'John', + 'last_name': 'Doe', + 'date_of_birth': '1925-01-01', + 'commune_naissance': 'Cassis', + } + ) + ) assert response.json['err'] == 1 assert response.json['err_desc'] == 'not-found' - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH): - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', - RESPONSE_SELECT_USAGER): - response = app.get(url + '?' + urlencode({ - 'first_name': 'John', - 'last_name': 'Doe', - 'date_of_birth': '1925-01-02', - })) + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH + ): + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', RESPONSE_SELECT_USAGER + ): + response = app.get( + url + + '?' + + urlencode( + { + 'first_name': 'John', + 'last_name': 'Doe', + 'date_of_birth': '1925-01-02', + } + ) + ) assert response.json['err'] == 1 assert response.json['err_desc'] == 'not-found' - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', - RESPONSE_SEARCH): - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', - RESPONSE_SELECT_USAGER_NO_CONTACTS): - response = app.get(url + '?' + urlencode({ - 'first_name': 'John', - 'last_name': 'Doe', - 'date_of_birth': '1925-01-01', - })) + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH + ): + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', + RESPONSE_SELECT_USAGER_NO_CONTACTS, + ): + response = app.get( + url + + '?' 
+ + urlencode( + { + 'first_name': 'John', + 'last_name': 'Doe', + 'date_of_birth': '1925-01-01', + } + ) + ) assert response.json['err'] == 1 assert response.json['err_desc'] == 'no-contacts' @@ -365,12 +427,19 @@ def test_ws_link_by_id_per(app, genesys): url = utils.generic_endpoint_url('atos-genesys', 'link-by-id-per', slug=genesys.slug) assert Link.objects.count() == 0 - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', - RESPONSE_SELECT_USAGER): - response = app.post(url + '?' + urlencode({ - 'NameID': 'zob', - 'id_per': '1234', - })) + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', RESPONSE_SELECT_USAGER + ): + response = app.post( + url + + '?' + + urlencode( + { + 'NameID': 'zob', + 'id_per': '1234', + } + ) + ) assert response.json['err'] == 0 assert Link.objects.count() == 1 @@ -381,15 +450,24 @@ def test_ws_link_by_id_per(app, genesys): url = utils.generic_endpoint_url('atos-genesys', 'search', slug=genesys.slug) - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH): - with utils.mock_url(FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', - RESPONSE_SELECT_USAGER): - response = app.get(url + '?' + urlencode({ - 'first_name': 'John', - 'last_name': 'Doe', - 'date_of_birth': '1925-01-01', - 'NameID': 'zob', - })) + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/chercheBeneficiaire', RESPONSE_SEARCH + ): + with utils.mock_url( + FAKE_URL + 'WSUsagerPublik/services/PublikService/selectUsager', RESPONSE_SELECT_USAGER + ): + response = app.get( + url + + '?' 
+ + urlencode( + { + 'first_name': 'John', + 'last_name': 'Doe', + 'date_of_birth': '1925-01-01', + 'NameID': 'zob', + } + ) + ) assert response.json['err'] == 0 assert response.json['already_paired'] is True assert response.json['link_id'] == link.id diff --git a/tests/test_availability.py b/tests/test_availability.py index bb0bd75f..e7a5a7a2 100644 --- a/tests/test_availability.py +++ b/tests/test_availability.py @@ -45,18 +45,22 @@ FEED_EXAMPLE = u""" def connector(db): return utils.setup_access_rights(Feed.objects.create(slug='test', url='http://example.net/')) + @all_requests def up_mock(url, request): return {'status_code': 200, 'content': FEED_EXAMPLE, 'request': request} + @all_requests def down_mock(url, request): return {'status_code': 404, 'content': 'down', 'request': request} + @all_requests def down_500_mock(url, request): return {'status_code': 500, 'content': 'down', 'request': request} + def test_feed_availability(app, connector): assert connector.get_availability_status() is None diff --git a/tests/test_base_adresse.py b/tests/test_base_adresse.py index db43e50e..e5f2f896 100644 --- a/tests/test_base_adresse.py +++ b/tests/test_base_adresse.py @@ -13,37 +13,42 @@ from django.core.management import call_command from django.core.management.base import CommandError from django.utils.six.moves.urllib.parse import urljoin -from passerelle.apps.base_adresse.models import (BaseAdresse, StreetModel, CityModel, - DepartmentModel, RegionModel, AddressCacheModel) +from passerelle.apps.base_adresse.models import ( + BaseAdresse, + StreetModel, + CityModel, + DepartmentModel, + RegionModel, + AddressCacheModel, +) -FAKED_CONTENT = json.dumps({ - "limit": 1, - "attribution": "BAN", - "version": "draft", - "licence": "ODbL 1.0", - "query": "plop", - "type": "FeatureCollection", - "features": [ - { - "geometry": { - "type": "Point", - "coordinates": [-0.593775, 47.474633] - }, - "properties": { - "citycode": "49007", - "name": "Rue Roger Halope", - "id": 
"49007_6950_be54bd", - "city": "Angers", - "context": "49, Maine-et-Loire, Pays de la Loire", - "score": 0.14097272727272728, - "label": "Rue Roger Halope 49000 Angers", - "postcode": "49000", - "type": "street" - }, - "type": "Feature" - } - ] -}) +FAKED_CONTENT = json.dumps( + { + "limit": 1, + "attribution": "BAN", + "version": "draft", + "licence": "ODbL 1.0", + "query": "plop", + "type": "FeatureCollection", + "features": [ + { + "geometry": {"type": "Point", "coordinates": [-0.593775, 47.474633]}, + "properties": { + "citycode": "49007", + "name": "Rue Roger Halope", + "id": "49007_6950_be54bd", + "city": "Angers", + "context": "49, Maine-et-Loire, Pays de la Loire", + "score": 0.14097272727272728, + "label": "Rue Roger Halope 49000 Angers", + "postcode": "49000", + "type": "street", + }, + "type": "Feature", + } + ], + } +) FAKE_DATA = '' @@ -59,41 +64,25 @@ FAKE_API_GEO_LIST = [ "nom": "Paris", "population": 2190327, }, - { - "code": "97501", - "codesPostaux": [ - "97500" - ], - "nom": "Miquelon-Langlade", - "population": 596 - } + {"code": "97501", "codesPostaux": ["97500"], "nom": "Miquelon-Langlade", "population": 596}, ] FAKE_API_GEO = json.dumps(FAKE_API_GEO_LIST) -FAKE_API_GEO_DEPARTMENTS = json.dumps([ - { - "code": "75", - "codeRegion": "11", - "nom": "Paris" - }, - { - "code": "58", - "codeRegion": "27", - "nom": "Nièvre", - } -]) +FAKE_API_GEO_DEPARTMENTS = json.dumps( + [ + {"code": "75", "codeRegion": "11", "nom": "Paris"}, + { + "code": "58", + "codeRegion": "27", + "nom": "Nièvre", + }, + ] +) -FAKE_API_GEO_REGIONS = json.dumps([ - { - "code": "11", - "nom": "Île-de-France" - }, - { - "code": "27", - "nom": "Bourgogne-Franche-Comté" - } -]) +FAKE_API_GEO_REGIONS = json.dumps( + [{"code": "11", "nom": "Île-de-France"}, {"code": "27", "nom": "Bourgogne-Franche-Comté"}] +) @pytest.fixture @@ -103,35 +92,33 @@ def base_adresse(db): @pytest.fixture def base_adresse_97x(db): - return 
utils.setup_access_rights(BaseAdresse.objects.create(slug='base-adresse', - zipcode='97425')) + return utils.setup_access_rights(BaseAdresse.objects.create(slug='base-adresse', zipcode='97425')) @pytest.fixture def base_adresse_corsica(db): - return utils.setup_access_rights(BaseAdresse.objects.create(slug='base-adresse', - zipcode='20000, 20100 ')) + return utils.setup_access_rights(BaseAdresse.objects.create(slug='base-adresse', zipcode='20000, 20100 ')) @pytest.fixture def base_adresse_multiple(db): - return utils.setup_access_rights(BaseAdresse.objects.create(slug='base-adresse', - zipcode='73, 73100, 97425,20000 ')) + return utils.setup_access_rights( + BaseAdresse.objects.create(slug='base-adresse', zipcode='73, 73100, 97425,20000 ') + ) @pytest.fixture def base_adresse_coordinates(db): - return utils.setup_access_rights(BaseAdresse.objects.create(slug='base-adresse', - latitude=1.2, longitude=2.1)) + return utils.setup_access_rights( + BaseAdresse.objects.create(slug='base-adresse', latitude=1.2, longitude=2.1) + ) @pytest.fixture def street(db): - return StreetModel.objects.create(city=u'Chambéry', - name=u'Une rüê très äccentuéè', - zipcode=u'73000', - type=u'street', - citycode=u'73001') + return StreetModel.objects.create( + city=u'Chambéry', name=u'Une rüê très äccentuéè', zipcode=u'73000', type=u'street', citycode=u'73001' + ) @pytest.fixture @@ -146,27 +133,34 @@ def department(db, region): @pytest.fixture def city(db, region, department): - return CityModel.objects.create(name=u'Chambéry', code='73065', zipcode='73000', - population=42000, region=region, department=department) + return CityModel.objects.create( + name=u'Chambéry', + code='73065', + zipcode='73000', + population=42000, + region=region, + department=department, + ) @pytest.fixture def miquelon(db): - return CityModel.objects.create(name=u'Miquelon-Langlade', code='97501', zipcode='97500', - population=42) + return CityModel.objects.create(name=u'Miquelon-Langlade', code='97501', 
zipcode='97500', population=42) @pytest.fixture def mock_update_api_geo(): - with mock.patch('passerelle.apps.base_adresse.models.BaseAdresse.update_api_geo_data', - new=lambda x: None) as _fixture: + with mock.patch( + 'passerelle.apps.base_adresse.models.BaseAdresse.update_api_geo_data', new=lambda x: None + ) as _fixture: yield _fixture @pytest.fixture def mock_update_streets(): - with mock.patch('passerelle.apps.base_adresse.models.BaseAdresse.update_streets_data', - new=lambda x: None) as _fixture: + with mock.patch( + 'passerelle.apps.base_adresse.models.BaseAdresse.update_streets_data', new=lambda x: None + ) as _fixture: yield _fixture @@ -224,8 +218,7 @@ def test_base_adresse_search_qs_empty(app, base_adresse, mock_api_adresse_data_g assert len(resp.json) == 0 -def test_base_adresse_search_qs_parameters_error(app, base_adresse, - mock_api_adresse_data_gouv_fr_search): +def test_base_adresse_search_qs_parameters_error(app, base_adresse, mock_api_adresse_data_gouv_fr_search): resp = app.get('/base-adresse/%s/search' % base_adresse.slug, status=400) assert resp.json['err'] == 1 assert resp.json['err_class'] == 'passerelle.views.WrongParameter' @@ -235,8 +228,10 @@ def test_base_adresse_search_qs_parameters_error(app, base_adresse, assert resp.json['err'] == 1 assert 'coin' in resp.json['err_desc'] # signature and format are ignored - app.get('/base-adresse/%s/streets?zipcode=13400&signature=zz&format=jsonp' - '&raise=1&jsonpCallback=f' % base_adresse.slug) + app.get( + '/base-adresse/%s/streets?zipcode=13400&signature=zz&format=jsonp' + '&raise=1&jsonpCallback=f' % base_adresse.slug + ) @mock.patch('passerelle.utils.Request.get') @@ -261,27 +256,27 @@ def test_base_adresse_reverse(app, base_adresse, mock_api_adresse_data_gouv_fr_r assert data['address']['postcode'] == '49000' assert data['address']['citycode'] == '49007' + @mock.patch('passerelle.utils.Request.get') def test_base_adresse_reverse_having_several(mocked_get, app, base_adresse): content = 
json.loads(FAKED_CONTENT) - content['features'].append({ - "geometry": { - "type": "Point", - "coordinates": [-0.593775, 47.474633] - }, - "properties": { - "citycode": "49007", - "name": "Rue Eugène Bardon", - "id": "49007_6950_aaaaa", - "city": "Angers", - "context": "49, Maine-et-Loire, Pays de la Loire", - "score": 0.2, - "label": "Rue Eugène Bardon 49000 Angers", - "postcode": "49000", - "type": "street" - }, - "type": "Feature" - }) + content['features'].append( + { + "geometry": {"type": "Point", "coordinates": [-0.593775, 47.474633]}, + "properties": { + "citycode": "49007", + "name": "Rue Eugène Bardon", + "id": "49007_6950_aaaaa", + "city": "Angers", + "context": "49, Maine-et-Loire, Pays de la Loire", + "score": 0.2, + "label": "Rue Eugène Bardon 49000 Angers", + "postcode": "49000", + "type": "street", + }, + "type": "Feature", + } + ) faked_content = json.dumps(content) mocked_get.return_value = utils.FakedResponse(content=faked_content, status_code=200) resp = app.get('/base-adresse/%s/reverse?lon=-0.593775&lat=47.474633' % base_adresse.slug) @@ -309,14 +304,17 @@ def test_base_adresse_streets_unaccent(app, base_adresse, street): assert result['zipcode'] == street.zipcode assert result['id'] == str(street.id) + def test_base_adresse_streets_get_by_id(app, base_adresse, street): for i in range(10): # create additional streets - StreetModel.objects.create(city=u'Chambéry', - name=u'Une rue différente', - zipcode=str(73001 + i), - type='street', - citycode=str(73001 + i)) + StreetModel.objects.create( + city=u'Chambéry', + name=u'Une rue différente', + zipcode=str(73001 + i), + type='street', + citycode=str(73001 + i), + ) resp = app.get('/base-adresse/%s/streets?q=une rue tres acc' % base_adresse.slug) assert 'data' in resp.json @@ -339,11 +337,13 @@ def test_base_adresse_streets_get_by_id(app, base_adresse, street): def test_base_adresse_streets_get_by_codes(app, base_adresse, street): for i in range(20): - StreetModel.objects.create(city=u'Paris %d' % 
i, - name=u'La rue %d' % i, - zipcode=str(75000 + i*10), - type='street', - citycode=str(75000 + i*11)) + StreetModel.objects.create( + city=u'Paris %d' % i, + name=u'La rue %d' % i, + zipcode=str(75000 + i * 10), + type='street', + citycode=str(75000 + i * 11), + ) resp = app.get('/base-adresse/%s/streets?zipcode=75' % base_adresse.slug) assert 'data' in resp.json @@ -383,7 +383,9 @@ def test_base_adresse_command_update(mocked_get, db, base_adresse): with open(filepath, 'rb') as ban_file: mocked_get.return_value = utils.FakedResponse(content=ban_file.read(), status_code=200) call_command('cron', 'daily') - mocked_get.assert_called_once_with('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-73.ndjson.gz') + mocked_get.assert_called_once_with( + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-73.ndjson.gz' + ) streets = StreetModel.objects.all() assert len(streets) == 3 street = StreetModel.objects.order_by('id').first() @@ -406,7 +408,9 @@ def test_base_adresse_command_job_update(mocked_get, db, base_adresse): mocked_get.return_value = utils.FakedResponse(content=ban_file.read(), status_code=200) # check the job added at save() downloads streets base_adresse.jobs() - mocked_get.assert_called_once_with('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-73.ndjson.gz') + mocked_get.assert_called_once_with( + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-73.ndjson.gz' + ) assert StreetModel.objects.all().count() == 3 # second save doesn't download anything @@ -429,7 +433,9 @@ def test_base_adresse_command_update_97x(mocked_get, db, base_adresse_97x): with open(filepath, 'rb') as ban_file: mocked_get.return_value = utils.FakedResponse(content=ban_file.read(), status_code=200) call_command('cron', 'daily') - mocked_get.assert_called_once_with('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-974.ndjson.gz') + 
mocked_get.assert_called_once_with( + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-974.ndjson.gz' + ) assert StreetModel.objects.count() == 2 @@ -442,8 +448,12 @@ def test_base_adresse_command_update_corsica(mocked_get, db, base_adresse_corsic mocked_get.return_value = utils.FakedResponse(content=ban_file.read(), status_code=200) call_command('cron', 'daily') assert mocked_get.call_count == 2 - mocked_get.assert_any_call('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-2A.ndjson.gz') - mocked_get.assert_any_call('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-2B.ndjson.gz') + mocked_get.assert_any_call( + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-2A.ndjson.gz' + ) + mocked_get.assert_any_call( + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-2B.ndjson.gz' + ) assert StreetModel.objects.count() == 0 @@ -456,10 +466,18 @@ def test_base_adresse_command_update_multiple(mocked_get, db, base_adresse_multi mocked_get.return_value = utils.FakedResponse(content=ban_file.read(), status_code=200) call_command('cron', 'daily') assert mocked_get.call_count == 4 - mocked_get.assert_any_call('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-73.ndjson.gz') - mocked_get.assert_any_call('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-974.ndjson.gz') - mocked_get.assert_any_call('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-2A.ndjson.gz') - mocked_get.assert_any_call('https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-2B.ndjson.gz') + mocked_get.assert_any_call( + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-73.ndjson.gz' + ) + mocked_get.assert_any_call( + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-974.ndjson.gz' + ) + mocked_get.assert_any_call( + 
'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-2A.ndjson.gz' + ) + mocked_get.assert_any_call( + 'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-2B.ndjson.gz' + ) assert StreetModel.objects.count() == 5 @@ -511,8 +529,9 @@ def test_base_adresse_cities_dash_in_q(app, base_adresse, miquelon): def test_base_adresse_cities_region_department(app, base_adresse, miquelon, city): reg = RegionModel.objects.create(name=u'IdF', code='11') dep = DepartmentModel.objects.create(name=u'Paris', code='75', region=reg) - paris = CityModel.objects.create(name=u'Paris', code='75056', zipcode='75014', - population=2000000, region=reg, department=dep) + paris = CityModel.objects.create( + name=u'Paris', code='75056', zipcode='75014', population=2000000, region=reg, department=dep + ) resp = app.get('/base-adresse/%s/cities?department_code=73' % base_adresse.slug) result = resp.json['data'] @@ -608,8 +627,10 @@ def test_base_adresse_regions(app, base_adresse, region): @pytest.mark.usefixtures('mock_update_streets') @mock.patch('passerelle.utils.Request.get') def test_base_adresse_command_update_geo(mocked_get, db, base_adresse): - return_values = [utils.FakedResponse(content=content, status_code=200) - for content in (FAKE_API_GEO_REGIONS, FAKE_API_GEO_DEPARTMENTS, FAKE_API_GEO)] + return_values = [ + utils.FakedResponse(content=content, status_code=200) + for content in (FAKE_API_GEO_REGIONS, FAKE_API_GEO_DEPARTMENTS, FAKE_API_GEO) + ] mocked_get.side_effect = return_values call_command('cron', 'daily') assert mocked_get.call_count == 3 @@ -668,15 +689,19 @@ def test_base_adresse_command_update_geo(mocked_get, db, base_adresse): @pytest.mark.usefixtures('mock_update_streets') @mock.patch('passerelle.utils.Request.get') def test_base_adresse_command_update_geo_delete(mocked_get, db, base_adresse): - return_values = [utils.FakedResponse(content=content, status_code=200) - for content in (FAKE_API_GEO_REGIONS, 
FAKE_API_GEO_DEPARTMENTS, FAKE_API_GEO)] + return_values = [ + utils.FakedResponse(content=content, status_code=200) + for content in (FAKE_API_GEO_REGIONS, FAKE_API_GEO_DEPARTMENTS, FAKE_API_GEO) + ] mocked_get.side_effect = return_values call_command('cron', 'daily') assert CityModel.objects.count() == 3 new_fake_api_geo = json.dumps([FAKE_API_GEO_LIST[1]]) - return_values = [utils.FakedResponse(content=content, status_code=200) - for content in (FAKE_API_GEO_REGIONS, FAKE_API_GEO_DEPARTMENTS, new_fake_api_geo)] + return_values = [ + utils.FakedResponse(content=content, status_code=200) + for content in (FAKE_API_GEO_REGIONS, FAKE_API_GEO_DEPARTMENTS, new_fake_api_geo) + ] mocked_get.side_effect = return_values call_command('cron', 'daily') assert CityModel.objects.count() == 1 @@ -685,8 +710,10 @@ def test_base_adresse_command_update_geo_delete(mocked_get, db, base_adresse): @pytest.mark.usefixtures('mock_update_streets') @mock.patch('passerelle.utils.Request.get') def test_base_adresse_command_job_update_geo(mocked_get, db, base_adresse): - return_values = [utils.FakedResponse(content=content, status_code=200) - for content in (FAKE_API_GEO_REGIONS, FAKE_API_GEO_DEPARTMENTS, FAKE_API_GEO)] + return_values = [ + utils.FakedResponse(content=content, status_code=200) + for content in (FAKE_API_GEO_REGIONS, FAKE_API_GEO_DEPARTMENTS, FAKE_API_GEO) + ] mocked_get.side_effect = return_values # check the job added at save() downloads data base_adresse.jobs() @@ -752,8 +779,7 @@ def test_base_adresse_addresses_qs_page_limit(mocked_get, app, base_adresse): resp = app.get('/base-adresse/%s/addresses?q=plop&page_limit=100' % base_adresse.slug) assert 'limit=20' in mocked_get.call_args[0][0] - resp = app.get('/base-adresse/%s/addresses?q=plop&page_limit=blabla' % base_adresse.slug, - status=400) + resp = app.get('/base-adresse/%s/addresses?q=plop&page_limit=blabla' % base_adresse.slug, status=400) assert 'invalid value' in resp.json['err_desc'] @@ -794,16 +820,14 @@ def 
test_base_adresse_addresses_cache(app, base_adresse, mock_api_adresse_data_g assert AddressCacheModel.objects.count() == 1 # no new object has been created -def test_base_adresse_addresses_cache_err(app, base_adresse, - mock_api_adresse_data_gouv_fr_search): +def test_base_adresse_addresses_cache_err(app, base_adresse, mock_api_adresse_data_gouv_fr_search): resp = app.get('/base-adresse/%s/addresses?id=%s' % (base_adresse.slug, 'wrong_id')) assert mock_api_adresse_data_gouv_fr_search.call['count'] == 0 assert 'err' in resp.json @pytest.mark.usefixtures('mock_update_api_geo', 'mock_update_streets') -def test_base_adresse_addresses_clean_cache(app, base_adresse, freezer, - mock_api_adresse_data_gouv_fr_search): +def test_base_adresse_addresses_clean_cache(app, base_adresse, freezer, mock_api_adresse_data_gouv_fr_search): resp = app.get('/base-adresse/%s/addresses?q=plop' % base_adresse.slug) assert AddressCacheModel.objects.count() == 1 diff --git a/tests/test_cartads_cs.py b/tests/test_cartads_cs.py index 31e30f73..59182911 100644 --- a/tests/test_cartads_cs.py +++ b/tests/test_cartads_cs.py @@ -18,25 +18,27 @@ from passerelle.base.models import Job import utils + @pytest.fixture def connector(db): - return utils.make_resource(CartaDSCS, - title='Test', - slug='test', - description='...', - wsdl_base_url='http://test.invalid/adscs/webservices/', - username='test', - password='test', - iv='x'*16, - secret_key='y'*16, - ftp_server='ftp.invalid', - ftp_username='test', - ftp_password='test', - ftp_client_name='test' + return utils.make_resource( + CartaDSCS, + title='Test', + slug='test', + description='...', + wsdl_base_url='http://test.invalid/adscs/webservices/', + username='test', + password='test', + iv='x' * 16, + secret_key='y' * 16, + ftp_server='ftp.invalid', + ftp_username='test', + ftp_password='test', + ftp_client_name='test', ) -class FakeService(): +class FakeService: def GetCommunes(self, token, options=None): return [{'Key': 2, 'Value': 'AIGREFEUILLE 
SUR MAINE'}] @@ -47,31 +49,39 @@ class FakeService(): return [{'Key': 1, 'Value': "CU d'information"}] def GetListePdf(self, token, type_dossier_id, options=None): - return [{'UrlTelechargement': 'https://invalid/adscs/webservices/ServicePDF.ashx?pdf=13410*04', - 'Nom': 'Cerfa 13410-04', - 'Description': "Demande de Certificat d'urbanisme", - 'Identifiant': '13410*04'}] + return [ + { + 'UrlTelechargement': 'https://invalid/adscs/webservices/ServicePDF.ashx?pdf=13410*04', + 'Nom': 'Cerfa 13410-04', + 'Description': "Demande de Certificat d'urbanisme", + 'Identifiant': '13410*04', + } + ] def GetPieces(self, token, type_dossier_id, objet_demande_id): - return [{'IdPiece': 1065, - 'Libelle': 'DECLARATION PREALABLE INCOMPLETE', - 'CodePiece': 'CU', - 'Descriptif': 'Complétez la rubrique', - 'Reglementaire': False, - }, - {'IdPiece': 1, - 'Libelle': 'Plan de situation du terrain', - 'CodePiece': 'CU01', - 'Descriptif': 'Un plan de situation du terrain [Art. R. 410-1 al 1 du code de l\'urbanisme]', - 'Reglementaire': True, - }, - {'IdPiece': 62, - 'Libelle': u'Plan de masse des constructions à démolir', - 'Descriptif': 'Un plan de masse des constructions...', - 'CodePiece': 'PCA1', - 'Reglementaire': False, - }, - ] + return [ + { + 'IdPiece': 1065, + 'Libelle': 'DECLARATION PREALABLE INCOMPLETE', + 'CodePiece': 'CU', + 'Descriptif': 'Complétez la rubrique', + 'Reglementaire': False, + }, + { + 'IdPiece': 1, + 'Libelle': 'Plan de situation du terrain', + 'CodePiece': 'CU01', + 'Descriptif': 'Un plan de situation du terrain [Art. R. 
410-1 al 1 du code de l\'urbanisme]', + 'Reglementaire': True, + }, + { + 'IdPiece': 62, + 'Libelle': u'Plan de masse des constructions à démolir', + 'Descriptif': 'Un plan de masse des constructions...', + 'CodePiece': 'PCA1', + 'Reglementaire': False, + }, + ] def NotifierDepotDossier(self, token, commune_id, type_dossier_id, filename, email, infos): return 'True' @@ -82,81 +92,101 @@ class FakeService(): except CartaDSDossier.DoesNotExist: dossier = None if dossier and dossier.tracking_code == 'DOCXXXX': - return [{ - 'LibelleEtape': 'Attente DOC', - 'IdEtapeDossier': 2356473, - 'DateEcheance': '2022-04-01T00:00:00', - 'IdEtape': 12, - 'DateRealisation': None, - 'DateReference': '2019-03-25T00:00:00', - 'IdDossier': 471160},] + return [ + { + 'LibelleEtape': 'Attente DOC', + 'IdEtapeDossier': 2356473, + 'DateEcheance': '2022-04-01T00:00:00', + 'IdEtape': 12, + 'DateRealisation': None, + 'DateReference': '2019-03-25T00:00:00', + 'IdDossier': 471160, + }, + ] elif dossier and dossier.tracking_code == 'DAACTXXXX': - return [{ - 'LibelleEtape': 'Attente DAACT', - 'IdEtapeDossier': 2356473, - 'DateEcheance': '2022-04-01T00:00:00', - 'IdEtape': 12, - 'DateRealisation': None, - 'DateReference': '2019-03-25T00:00:00', - 'IdDossier': 471160},] + return [ + { + 'LibelleEtape': 'Attente DAACT', + 'IdEtapeDossier': 2356473, + 'DateEcheance': '2022-04-01T00:00:00', + 'IdEtape': 12, + 'DateRealisation': None, + 'DateReference': '2019-03-25T00:00:00', + 'IdDossier': 471160, + }, + ] # default - return [{ - 'DateEcheance': datetime.datetime(2019, 3, 1, 0, 0), - 'DateRealisation': None, - 'DateReference': datetime.datetime(2019, 2, 14, 0, 0), - 'IdDossier': 135792, - 'IdEtape': 1, - 'IdEtapeDossier': 692232, - 'LibelleEtape': 'En cours de saisie' - }] + return [ + { + 'DateEcheance': datetime.datetime(2019, 3, 1, 0, 0), + 'DateRealisation': None, + 'DateReference': datetime.datetime(2019, 2, 14, 0, 0), + 'IdDossier': 135792, + 'IdEtape': 1, + 'IdEtapeDossier': 692232, + 
'LibelleEtape': 'En cours de saisie', + } + ] def GetPiecesDossierACompleter(self, token, dossier_id): return [ - OrderedDict([ - (u'CodePiece', 'PC07'), - (u'DateDemande', datetime.datetime(2019, 4, 15, 0, 0)), - (u'DatePresentation', None), - (u'DateReception', None), - (u'Descriptif', u"Un document graphique..."), - (u'IdDosPiece', 133837), - (u'IdPiece', 58), - (u'LibellePiece', u"Document graphique permettant..."), - (u'NbDocuments', 0)]), - OrderedDict([ - (u'CodePiece', 'PC16-1'), - (u'DateDemande', datetime.datetime(2019, 4, 15, 0, 0)), - (u'DatePresentation', None), - (u'DateReception', None), - (u'Descriptif', u'Formulaire attestant...'), - (u'IdDosPiece', 133840), - (u'IdPiece', 99), - (u'LibellePiece', u'Formulaire attestant...'), - (u'NbDocuments', 0)]), + OrderedDict( + [ + (u'CodePiece', 'PC07'), + (u'DateDemande', datetime.datetime(2019, 4, 15, 0, 0)), + (u'DatePresentation', None), + (u'DateReception', None), + (u'Descriptif', u"Un document graphique..."), + (u'IdDosPiece', 133837), + (u'IdPiece', 58), + (u'LibellePiece', u"Document graphique permettant..."), + (u'NbDocuments', 0), + ] + ), + OrderedDict( + [ + (u'CodePiece', 'PC16-1'), + (u'DateDemande', datetime.datetime(2019, 4, 15, 0, 0)), + (u'DatePresentation', None), + (u'DateReception', None), + (u'Descriptif', u'Formulaire attestant...'), + (u'IdDosPiece', 133840), + (u'IdPiece', 99), + (u'LibellePiece', u'Formulaire attestant...'), + (u'NbDocuments', 0), + ] + ), ] def GetPiecesDaact(self, token, dossier_id): return [ - OrderedDict([ - (u'CodePiece', 'AT1'), - (u'DateDemande', None), - (u'DatePresentation', None), - (u'DateReception', None), - (u'Descriptif', u"L'attestation constatant..."), - (u'IdDosPiece', 0), - (u'IdPiece', 191), - (u'LibellePiece', 'Attestation constat des travaux'), - (u'NbDocuments', 0)]), - OrderedDict([ - (u'CodePiece', 'AT2'), - (u'DateDemande', None), - (u'DatePresentation', None), - (u'DateReception', None), - (u'Descriptif', u"Dans les cas..."), - 
(u'IdDosPiece', 0), - (u'IdPiece', 192), - (u'LibellePiece', u'Document du...'), - (u'NbDocuments', 0)]), + OrderedDict( + [ + (u'CodePiece', 'AT1'), + (u'DateDemande', None), + (u'DatePresentation', None), + (u'DateReception', None), + (u'Descriptif', u"L'attestation constatant..."), + (u'IdDosPiece', 0), + (u'IdPiece', 191), + (u'LibellePiece', 'Attestation constat des travaux'), + (u'NbDocuments', 0), + ] + ), + OrderedDict( + [ + (u'CodePiece', 'AT2'), + (u'DateDemande', None), + (u'DatePresentation', None), + (u'DateReception', None), + (u'Descriptif', u"Dans les cas..."), + (u'IdDosPiece', 0), + (u'IdPiece', 192), + (u'LibellePiece', u'Document du...'), + (u'NbDocuments', 0), + ] + ), ] def UploadFile(self, FileByteStream, _soapheaders): @@ -169,14 +199,17 @@ class FakeService(): def GetInfosDossier(self, token, id_dossier): assert id_dossier in (123, '135792') - return OrderedDict([ - (u'AdresseTerrain', u'all\xe9e des Fleurs'), - (u'CoTypeDossier', 'PC'), - (u'Commune', u'AIGREFEUILLE SUR MAINE'), - (u'DateDepot', datetime.datetime(2019, 9, 19, 0, 0)), - (u'IdDossier', 478864), - (u'NomDossier', 'PC 069 085 19 00010'), - (u'TypeDossier', 'Permis de construire')]) + return OrderedDict( + [ + (u'AdresseTerrain', u'all\xe9e des Fleurs'), + (u'CoTypeDossier', 'PC'), + (u'Commune', u'AIGREFEUILLE SUR MAINE'), + (u'DateDepot', datetime.datetime(2019, 9, 19, 0, 0)), + (u'IdDossier', 478864), + (u'NomDossier', 'PC 069 085 19 00010'), + (u'TypeDossier', 'Permis de construire'), + ] + ) def GetMotPasse(self, token, id_dossier): return 'D8B912CE-2A0C-4504-AE3B-74F2EF6BABA6' @@ -193,10 +226,12 @@ def cached_data(connector, app): with HTTMock(pdf_mock): connector.daily() + def test_communes(connector, app, cached_data): resp = app.get('/cartads-cs/test/communes') assert resp.json == {'data': [{'text': 'AIGREFEUILLE SUR MAINE', 'id': '2'}], 'err': 0} + def test_types_dossier(connector, app, cached_data): resp = app.get('/cartads-cs/test/types_dossier', status=400) resp = 
app.get('/cartads-cs/test/types_dossier?commune_id=2') @@ -208,16 +243,25 @@ def test_types_dossier(connector, app, cached_data): resp = app.get('/cartads-cs/test/types_dossier?commune_id=2&filter=AT') assert resp.json == {'data': [], 'err': 0} + def test_objets_demande(connector, app, cached_data): resp = app.get('/cartads-cs/test/objets_demande?type_dossier_id=CU') assert resp.json == {'data': [{'id': '1', 'text': "CU d'information"}], 'err': 0} + def test_liste_pdf(connector, app, cached_data): resp = app.get('/cartads-cs/test/liste_pdf?type_dossier_id=CU') - assert resp.json == {'data': [{'id': '13410*04', - 'text': "Cerfa 13410-04: Demande de Certificat d'urbanisme", - 'url': 'http://testserver/media/public/cartads_cs/test/documents/cerfa_13410-04.pdf'}], - 'err': 0} + assert resp.json == { + 'data': [ + { + 'id': '13410*04', + 'text': "Cerfa 13410-04: Demande de Certificat d'urbanisme", + 'url': 'http://testserver/media/public/cartads_cs/test/documents/cerfa_13410-04.pdf', + } + ], + 'err': 0, + } + def test_pieces_management(connector, app, cached_data): resp = app.get('/cartads-cs/test/pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB') @@ -235,28 +279,26 @@ def test_pieces_management(connector, app, cached_data): assert len(piece['files']) == 1 assert list(piece['files'][0].keys()) == ['url'] - resp = app.get('/cartads-cs/test/check_pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB') + resp = app.get( + '/cartads-cs/test/check_pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB' + ) assert resp.json == {'result': False, 'err': 0} resp = app.post(data[0]['files'][0]['url'], upload_files=[]) assert resp.json == [] - resp = app.post(data[0]['files'][0]['url'], - upload_files=[('foobar', 'test.pdf', b'%PDF...')]) + resp = app.post(data[0]['files'][0]['url'], upload_files=[('foobar', 'test.pdf', b'%PDF...')]) assert resp.json == [] - resp = app.post(data[0]['files'][0]['url'], - upload_files=[('files[]', 
'test.pdf', b'%PDF...')]) + resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', b'%PDF...')]) assert resp.json == [{'error': 'The CERFA should be a PDF file.'}] pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'minimal.pdf'), 'rb').read() - resp = app.post(data[0]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) assert resp.json == [{'error': 'The CERFA should not be a scanned document.'}] pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb').read() - resp = app.post(data[0]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) cerfa_token = resp.json[0]['token'] resp = app.get('/cartads-cs/test/pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB') @@ -272,48 +314,56 @@ def test_pieces_management(connector, app, cached_data): resp = app.get('/cartads-cs/test/pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB') assert [x['id'] for x in resp.json['data']] == ['cerfa-CU-1', 'cerfa-autres-CU-1', '1', '1065', '62'] - resp = app.get('/cartads-cs/test/pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB&demolitions=true') + resp = app.get( + '/cartads-cs/test/pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB&demolitions=true' + ) assert [x['id'] for x in resp.json['data']] == ['cerfa-CU-1', 'cerfa-autres-CU-1', '1', '1065', '62'] - resp = app.get('/cartads-cs/test/pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB&demolitions=false') + resp = app.get( + '/cartads-cs/test/pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB&demolitions=false' + ) assert [x['id'] for x in resp.json['data']] == ['cerfa-CU-1', 'cerfa-autres-CU-1', '1', '1065'] - resp 
= app.post(data[0]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) - resp = app.post(data[1]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) - resp = app.post(data[1]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[1]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[1]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) resp = app.get('/cartads-cs/test/pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB') data = resp.json['data'] assert len(data[1]['files']) == 3 - resp = app.get('/cartads-cs/test/check_pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB') + resp = app.get( + '/cartads-cs/test/check_pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB' + ) assert resp.json == {'result': False, 'err': 0} - resp = app.post(data[2]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[2]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) - resp = app.get('/cartads-cs/test/check_pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB') + resp = app.get( + '/cartads-cs/test/check_pieces?type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB' + ) assert resp.json == {'result': True, 'err': 0} - resp = app.post(data[2]['files'][0]['url'], - upload_files=[('files[]', 'test.jpeg', b'...')]) + resp = app.post(data[2]['files'][0]['url'], upload_files=[('files[]', 'test.jpeg', b'...')]) assert resp.json[0]['token'] resp = app.post(data[2]['files'][0]['url'], upload_files=[('files[]', 'test.gif', b'...')]) assert resp.json == [{'error': 'The file should be a PDF document or a JPEG image.'}] + def test_send(connector, app, cached_data): CartaDSFile.objects.all().delete() 
Job.objects.all().delete() test_pieces_management(connector, app, cached_data) - resp = app.get('/cartads-cs/test/send?commune_id=2&type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB&email=test@invalid') + resp = app.get( + '/cartads-cs/test/send?commune_id=2&type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB&email=test@invalid' + ) CartaDSDossier.objects.all().delete() Job.objects.all().delete() - resp = app.get('/cartads-cs/test/send?commune_id=2&type_dossier_id=CU' - '&objet_demande_id=1&tracking_code=BBBBBBBB&email=test@invalid&name_id=1234') + resp = app.get( + '/cartads-cs/test/send?commune_id=2&type_dossier_id=CU' + '&objet_demande_id=1&tracking_code=BBBBBBBB&email=test@invalid&name_id=1234' + ) assert CartaDSDossier.objects.all().count() == 1 dossier = CartaDSDossier.objects.all().first() assert resp.json['dossier_id'] == dossier.id @@ -330,7 +380,10 @@ def test_send(connector, app, cached_data): dossier = CartaDSDossier.objects.get(id=dossier.id) assert dossier.zip_ack_response == 'True' - resp = app.post(dossier.notification_url, params={'notification': ''' + resp = app.post( + dossier.notification_url, + params={ + 'notification': ''' SKTJCMPD.zip 2019-02-14T00:00:00 @@ -342,17 +395,22 @@ def test_send(connector, app, cached_data): 135792 CU 044 043 19 A0006 -'''}) +''' + }, + ) dossier = CartaDSDossier.objects.get(id=dossier.id) assert dossier.cartads_id_dossier == '135792' assert dossier.cartads_numero_dossier == 'CU 044 043 19 A0006' + def test_send_notification_error(connector, app, cached_data): CartaDSFile.objects.all().delete() Job.objects.all().delete() test_pieces_management(connector, app, cached_data) - resp = app.get('/cartads-cs/test/send?commune_id=2&type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB&email=test@invalid') + resp = app.get( + '/cartads-cs/test/send?commune_id=2&type_dossier_id=CU&objet_demande_id=1&tracking_code=BBBBBBBB&email=test@invalid' + ) assert CartaDSDossier.objects.all().count() == 1 
dossier = CartaDSDossier.objects.all().first() assert resp.json['dossier_id'] == dossier.id @@ -369,7 +427,10 @@ def test_send_notification_error(connector, app, cached_data): dossier = CartaDSDossier.objects.get(id=dossier.id) assert dossier.zip_ack_response == 'True' - resp = app.post(dossier.notification_url, params={'notification': ''' + resp = app.post( + dossier.notification_url, + params={ + 'notification': ''' DJWQWLNZ.zip 2019-05-09T14:50:16.516718 @@ -382,7 +443,9 @@ def test_send_notification_error(connector, app, cached_data): -'''}) +''' + }, + ) dossier = CartaDSDossier.objects.get(id=dossier.id) assert dossier.cartads_id_dossier is None @@ -415,13 +478,17 @@ def test_status(connector, app, cached_data): assert CartaDSDossier.objects.get(pk=dossier.id).cartads_cache_code_acces assert CartaDSDossier.objects.get(pk=dossier.id).cartads_cache_infos + def test_status_error(connector, app, cached_data): CartaDSDossier.objects.all().delete() test_send_notification_error(connector, app, cached_data) dossier = CartaDSDossier.objects.all()[0] resp = app.get('/cartads-cs/test/status?dossier_id=%s' % dossier.id) - assert resp.json['status_label'] == u"File refused (Le modèle sélectionné ne correspond à aucun Cerfa géré par l'application.)" + assert ( + resp.json['status_label'] + == u"File refused (Le modèle sélectionné ne correspond à aucun Cerfa géré par l'application.)" + ) def test_status_zip_not_considered_error(connector, app, cached_data): @@ -452,11 +519,9 @@ def test_additional_pieces_management(connector, app, cached_data): assert len(piece['files']) == 1 assert list(piece['files'][0].keys()) == ['url'] - resp = app.post(data[0]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', b'%PDF...')]) + resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', b'%PDF...')]) assert resp.json[0]['token'] - assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, - sent_to_cartads=None).count() == 1 + assert 
CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 1 Job.objects.all().delete() resp = app.get('/cartads-cs/test/send_additional_pieces?tracking_code=%s' % dossier.tracking_code) @@ -467,8 +532,7 @@ def test_additional_pieces_management(connector, app, cached_data): connector.jobs() assert Job.objects.filter(method_name='send_additional_pieces_to_cartads', status='completed').count() - assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, - sent_to_cartads=None).count() == 0 + assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 0 def test_doc_pieces_management(connector, app, cached_data): @@ -494,11 +558,9 @@ def test_doc_pieces_management(connector, app, cached_data): assert list(data[0]['files'][0].keys()) == ['url'] pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb').read() - resp = app.post(data[0]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) assert resp.json[0]['token'] - assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, - sent_to_cartads=None).count() == 1 + assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 1 Job.objects.all().delete() resp = app.get('/cartads-cs/test/send_doc_pieces?tracking_code=%s' % dossier.tracking_code) @@ -509,8 +571,7 @@ def test_doc_pieces_management(connector, app, cached_data): connector.jobs() assert Job.objects.filter(method_name='send_doc_pieces_to_cartads', status='completed').count() - assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, - sent_to_cartads=None).count() == 0 + assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 0 def test_daact_pieces_management(connector, app, cached_data): @@ -537,18 
+598,14 @@ def test_daact_pieces_management(connector, app, cached_data): assert list(piece['files'][0].keys()) == ['url'] pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb').read() - resp = app.post(data[0]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[0]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) assert resp.json[0]['token'] - assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, - sent_to_cartads=None).count() == 1 + assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 1 pdf_contents = open(os.path.join(os.path.dirname(__file__), 'data', 'pdf-form.pdf'), 'rb').read() - resp = app.post(data[1]['files'][0]['url'], - upload_files=[('files[]', 'test.pdf', pdf_contents)]) + resp = app.post(data[1]['files'][0]['url'], upload_files=[('files[]', 'test.pdf', pdf_contents)]) assert resp.json[0]['token'] - assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, - sent_to_cartads=None).count() == 2 + assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 2 Job.objects.all().delete() resp = app.get('/cartads-cs/test/send_daact_pieces?tracking_code=%s' % dossier.tracking_code) @@ -559,8 +616,7 @@ def test_daact_pieces_management(connector, app, cached_data): connector.jobs() assert Job.objects.filter(method_name='send_daact_pieces_to_cartads', status='completed').count() - assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, - sent_to_cartads=None).count() == 0 + assert CartaDSFile.objects.filter(tracking_code=dossier.tracking_code, sent_to_cartads=None).count() == 0 def test_list_of_files(connector, app, cached_data): @@ -602,7 +658,9 @@ def test_join(connector, app, cached_data): # new with mock.patch('passerelle.apps.cartads_cs.models.CartaDSCS.soap_client') as client: client.return_value = 
mock.Mock(service=FakeService()) - resp = app.get('/cartads-cs/test/join?name_id=3456&dossier_number=123&dossier_password=XXX&formdata_url=https://etc.') + resp = app.get( + '/cartads-cs/test/join?name_id=3456&dossier_number=123&dossier_password=XXX&formdata_url=https://etc.' + ) dossier = CartaDSDossier.objects.get(id=resp.json['dossier_id']) assert dossier.commune_id == '2' assert dossier.type_dossier_id == 'PC' @@ -612,7 +670,9 @@ def test_join(connector, app, cached_data): # existing with mock.patch('passerelle.apps.cartads_cs.models.CartaDSCS.soap_client') as client: client.return_value = mock.Mock(service=FakeService()) - resp = app.get('/cartads-cs/test/join?name_id=2345&dossier_number=123&dossier_password=XXX&formdata_url=other') + resp = app.get( + '/cartads-cs/test/join?name_id=2345&dossier_number=123&dossier_password=XXX&formdata_url=other' + ) dossier = CartaDSDossier.objects.get(id=resp.json['dossier_id']) assert CartaDSDossier.objects.count() == 1 assert dossier.commune_id == '2' @@ -657,7 +717,8 @@ def test_role_sync(connector, app, cached_data): if url.path == '/api/roles/': dossier = CartaDSDossier.objects.all().first() assert json_loads(request.body) == json.loads( - '{"name": "Suivi Cart@DS (%s)", "slug": "_cartads_%s"}' % (dossier.id, dossier.id)) + '{"name": "Suivi Cart@DS (%s)", "slug": "_cartads_%s"}' % (dossier.id, dossier.id) + ) return {'content': json.dumps({'uuid': 'role-uuid'}), 'status_code': 200} elif url.path == '/api/roles/role-uuid/relationships/members/': body = json_loads(request.body) @@ -666,13 +727,15 @@ def test_role_sync(connector, app, cached_data): raise Exception('unhandled http call (%s)' % url) with HTTMock(idp_mock), override_settings( - KNOWN_SERVICES={ - 'authentic': { - 'idp': { - 'url': 'http://idp.example.org/', - 'verif_orig': 'abc', - 'secret': 'def', - } - }}): + KNOWN_SERVICES={ + 'authentic': { + 'idp': { + 'url': 'http://idp.example.org/', + 'verif_orig': 'abc', + 'secret': 'def', + } + } + } + ): 
test_join(connector, app, cached_data) assert idp_mock.subscribed_roles == set(['2345', '3456']) diff --git a/tests/test_cityweb.py b/tests/test_cityweb.py index 7e509155..5b131124 100644 --- a/tests/test_cityweb.py +++ b/tests/test_cityweb.py @@ -51,15 +51,9 @@ def setup(db): PAYLOAD = [ - { - 'birth': json.loads(get_file_from_test_base_dir('payload_birth.json')) - }, - { - 'mariage': json.loads(get_file_from_test_base_dir('payload_mariage.json')) - }, - { - 'death': json.loads(get_file_from_test_base_dir('payload_death.json')) - } + {'birth': json.loads(get_file_from_test_base_dir('payload_birth.json'))}, + {'mariage': json.loads(get_file_from_test_base_dir('payload_mariage.json'))}, + {'death': json.loads(get_file_from_test_base_dir('payload_death.json'))}, ] @@ -69,8 +63,7 @@ def payload(request): def assert_xml_doc(filename, assertions): - schema = etree.XMLSchema( - etree.parse(open(os.path.join(get_test_base_dir('cityweb'), 'cityweb.xsd')))) + schema = etree.XMLSchema(etree.parse(open(os.path.join(get_test_base_dir('cityweb'), 'cityweb.xsd')))) content = open(filename).read() xml_content = etree.fromstring(content) @@ -99,7 +92,8 @@ def test_demand_creation(app, setup, payload): 'demandeur.individu.adresse.lieu.ville': 'Nancy', 'demandeur.individu.adresse.lieu.pays': 'France', 'demandeur.individu.adresse.mail': 'chelsea@whatever.com', - 'natureDocument': 'CPI', 'nbExemplaire': 1, + 'natureDocument': 'CPI', + 'nbExemplaire': 1, 'dateDemande': '2016-10-20T14:41:20Z', 'evenement.natureEvenement': 'NAI', 'evenement.dateEvenement.dateDebut': '2012-07-14', @@ -127,7 +121,8 @@ def test_demand_creation(app, setup, payload): 'demandeur.individu.adresse.lieu.ville': 'Nancy', 'demandeur.individu.adresse.lieu.pays': 'France', 'demandeur.individu.adresse.mail': 'chelsea@whatever.com', - 'natureDocument': 'CPI', 'nbExemplaire': 1, + 'natureDocument': 'CPI', + 'nbExemplaire': 1, 'dateDemande': '2016-10-20T14:41:20Z', 'evenement.natureEvenement': 'MAR', 
'evenement.dateEvenement.dateDebut': '2012-07-14', @@ -159,7 +154,8 @@ def test_demand_creation(app, setup, payload): 'demandeur.individu.adresse.lieu.ville': 'Nancy', 'demandeur.individu.adresse.lieu.pays': 'France', 'demandeur.individu.adresse.mail': 'chelsea@whatever.com', - 'natureDocument': 'EXTSF', 'nbExemplaire': 1, + 'natureDocument': 'EXTSF', + 'nbExemplaire': 1, 'dateDemande': '2016-10-20T14:41:20Z', 'evenement.natureEvenement': 'DEC', 'evenement.dateEvenement.dateDebut': '2012-07-14', @@ -171,14 +167,15 @@ def test_demand_creation(app, setup, payload): def test_date_type_parsing(): - class BirthDate(DateType): tagname = 'date' with pytest.raises(APIError, match=r'Invalid date \(toto\) for '): BirthDate('toto') - with pytest.raises(APIError, match=r'Invalid date \(2017-02-30\) for : day is out of range for month'): + with pytest.raises( + APIError, match=r'Invalid date \(2017-02-30\) for : day is out of range for month' + ): BirthDate('2017-02-30') assert BirthDate('2017-02-22').value == '2017-02-22' diff --git a/tests/test_clicrdv.py b/tests/test_clicrdv.py index 934da76f..51bb111b 100644 --- a/tests/test_clicrdv.py +++ b/tests/test_clicrdv.py @@ -14,8 +14,9 @@ from test_manager import login @pytest.fixture def connector(db): - return ClicRdv.objects.create(slug='test', group_id='5242', apikey='test', username='test', - password='test') + return ClicRdv.objects.create( + slug='test', group_id='5242', apikey='test', username='test', password='test' + ) def test_connector_is_legacy(connector, app, admin_user): @@ -43,23 +44,24 @@ def test_request_call(mocked_request, app, connector): def test_interventionsets(mocked_request, app, connector): response = mock.Mock() response.json.return_value = { - "totalRecords": 2, - "records": [ - { - "sort": 1, - "publicname": "Une Demande de Passeport", - "name": "Demande", - "id": 7032, - "group_id": 5242, - }, - { - "sort": 2, - "publicname": "Un Retrait de Passeport", - "name": "Retrait", - "id": 7033, - "group_id": 
5242, - }, - ]} + "totalRecords": 2, + "records": [ + { + "sort": 1, + "publicname": "Une Demande de Passeport", + "name": "Demande", + "id": 7032, + "group_id": 5242, + }, + { + "sort": 2, + "publicname": "Un Retrait de Passeport", + "name": "Retrait", + "id": 7033, + "group_id": 5242, + }, + ], + } mocked_request.return_value = response resp = app.get('/clicrdv/test/interventionsets/') assert len(resp.json.get('data')) == 2 @@ -70,29 +72,30 @@ def test_interventionsets(mocked_request, app, connector): def test_interventionsets_details(mocked_request, app, connector): response = mock.Mock() response.json.return_value = { - "totalRecords": 2, - "records": [ - { - "sort": 1, - "publicname": "pour une personne", - "description": None, - "name": "1 personne", - "interventionset_id": 7032, - "group_id": 5242, - "id": 63258, - "abbr": "1 demande" - }, - { - "sort": 2, - "publicname": "pour deuxs personnes", - "description": None, - "name": "2 personnes", - "interventionset_id": 7032, - "group_id": 5242, - "id": 63259, - "abbr": "2 demandes" - }, - ]} + "totalRecords": 2, + "records": [ + { + "sort": 1, + "publicname": "pour une personne", + "description": None, + "name": "1 personne", + "interventionset_id": 7032, + "group_id": 5242, + "id": 63258, + "abbr": "1 demande", + }, + { + "sort": 2, + "publicname": "pour deuxs personnes", + "description": None, + "name": "2 personnes", + "interventionset_id": 7032, + "group_id": 5242, + "id": 63259, + "abbr": "2 demandes", + }, + ], + } mocked_request.return_value = response resp = app.get('/clicrdv/test/interventionsets/7032/') assert len(resp.json.get('data')) == 2 @@ -124,10 +127,9 @@ def test_interventions_get_datetimes(mocked_request, app, connector): assert query['apikey'] == ['test'] assert query['format'] == ['json'] - response.json.return_value = {"availabletimeslots": [ - { "start": "2016-09-21 12:34:56" }, - { "start": "2016-09-22 11:22:33" } - ]} + response.json.return_value = { + "availabletimeslots": [{"start": 
"2016-09-21 12:34:56"}, {"start": "2016-09-22 11:22:33"}] + } mocked_request.return_value = response resp = app.get('/clicrdv/test/interventions/63258/dates/').json assert mocked_request.call_count == 2 @@ -136,10 +138,9 @@ def test_interventions_get_datetimes(mocked_request, app, connector): assert resp['data'][0] == {'id': '2016-09-21', 'text': '21 September 2016'} assert resp['data'][1] == {'id': '2016-09-22', 'text': '22 September 2016'} - response.json.return_value = {"availabletimeslots": [ - { "start": "2016-09-22 11:22:33" }, - { "start": "2016-09-21 12:34:56" } - ]} # will be sorted + response.json.return_value = { + "availabletimeslots": [{"start": "2016-09-22 11:22:33"}, {"start": "2016-09-21 12:34:56"}] + } # will be sorted mocked_request.return_value = response resp = app.get('/clicrdv/test/interventions/63258/datetimes/').json assert mocked_request.call_count == 3 @@ -148,10 +149,9 @@ def test_interventions_get_datetimes(mocked_request, app, connector): assert resp['data'][0] == {'id': '2016-09-21-12:34:56', 'text': '21 September 2016 12:34'} assert resp['data'][1] == {'id': '2016-09-22-11:22:33', 'text': '22 September 2016 11:22'} - response.json.return_value = {"availabletimeslots": [ - { "start": "2016-09-21 12:34:56" }, - { "start": "2016-09-21 11:22:33" } - ]} # will be sorted + response.json.return_value = { + "availabletimeslots": [{"start": "2016-09-21 12:34:56"}, {"start": "2016-09-21 11:22:33"}] + } # will be sorted mocked_request.return_value = response resp = app.get('/clicrdv/test/interventions/63258/2016-09-21/times').json assert mocked_request.call_count == 4 @@ -185,16 +185,16 @@ def test_interventions_get_datetimes_error(mocked_request, app, connector): @mock.patch('passerelle.utils.Request.request') def test_cancel_appointment(mocked_request, app, connector): obj_type = ContentType.objects.get_for_model(ClicRdv) - apiuser = ApiUser.objects.create(username='apiuser', keytype='API', - key='apiuser') - 
AccessRight.objects.create(codename='can_manage_appointment', - resource_type=obj_type, resource_pk=connector.pk, - apiuser=apiuser) + apiuser = ApiUser.objects.create(username='apiuser', keytype='API', key='apiuser') + AccessRight.objects.create( + codename='can_manage_appointment', resource_type=obj_type, resource_pk=connector.pk, apiuser=apiuser + ) resp = app.get('/clicrdv/test/63258/cancel?apikey=apiuser').json assert mocked_request.call_count == 1 assert resp['data']['success'] + @mock.patch('passerelle.utils.Request.request') def test_failed_cancel_appointment(mocked_request, app, connector): def raise_for_status(): @@ -205,11 +205,10 @@ def test_failed_cancel_appointment(mocked_request, app, connector): response.raise_for_status = raise_for_status mocked_request.return_value = response obj_type = ContentType.objects.get_for_model(ClicRdv) - apiuser = ApiUser.objects.create(username='apiuser', keytype='API', - key='apiuser') - AccessRight.objects.create(codename='can_manage_appointment', - resource_type=obj_type, resource_pk=connector.pk, - apiuser=apiuser) + apiuser = ApiUser.objects.create(username='apiuser', keytype='API', key='apiuser') + AccessRight.objects.create( + codename='can_manage_appointment', resource_type=obj_type, resource_pk=connector.pk, apiuser=apiuser + ) resp = app.get('/clicrdv/test/63258/cancel?apikey=apiuser').json assert mocked_request.call_count == 1 assert resp.get('err') == 0 @@ -227,15 +226,21 @@ def test_failed_appointment_creation(mocked_request, app, connector): response.raise_for_status = raise_for_status mocked_request.return_value = response obj_type = ContentType.objects.get_for_model(ClicRdv) - apiuser = ApiUser.objects.create(username='apiuser', keytype='API', - key='apiuser') - AccessRight.objects.create(codename='can_manage_appointment', - resource_type=obj_type, resource_pk=connector.pk, - apiuser=apiuser) + apiuser = ApiUser.objects.create(username='apiuser', keytype='API', key='apiuser') + 
AccessRight.objects.create( + codename='can_manage_appointment', resource_type=obj_type, resource_pk=connector.pk, apiuser=apiuser + ) - data = {'fields': {'clicrdv_date_raw': '2017-01-01' , 'clicrdv_time_raw': '12:00:00', - 'clicrdv_fiche_str10': 'Test', - 'firstname': 'Foo', 'lastname': 'Bar', 'email': 'foobar@example.com'}} + data = { + 'fields': { + 'clicrdv_date_raw': '2017-01-01', + 'clicrdv_time_raw': '12:00:00', + 'clicrdv_fiche_str10': 'Test', + 'firstname': 'Foo', + 'lastname': 'Bar', + 'email': 'foobar@example.com', + } + } resp = app.post_json('/clicrdv/test/interventions/63258/create?apikey=apiuser', params=data).json assert resp['data'] assert not resp['data']['success'] diff --git a/tests/test_cmis.py b/tests/test_cmis.py index cece5b27..8fbbe427 100644 --- a/tests/test_cmis.py +++ b/tests/test_cmis.py @@ -33,23 +33,29 @@ def b64encode(content): def setup(db): api = ApiUser.objects.create(username='all', keytype='', key='') conn = CmisConnector.objects.create( - cmis_endpoint='http://example.com/cmisatom', username='admin', password='admin', - slug='slug-cmis') + cmis_endpoint='http://example.com/cmisatom', username='admin', password='admin', slug='slug-cmis' + ) obj_type = ContentType.objects.get_for_model(conn) AccessRight.objects.create( - codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=conn.pk) + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=conn.pk + ) return conn def test_uploadfile(app, setup, tmpdir, monkeypatch): - class FakeCMISGateway(object): - def __init__(self, *args, **kwargs): pass - def create_doc(self, file_name, file_path, file_byte_content, - content_type=None, object_type=None, properties=None): + def create_doc( + self, + file_name, + file_path, + file_byte_content, + content_type=None, + object_type=None, + properties=None, + ): assert content_type == "image/jpeg" with open(file_name, 'wb') as f: f.write(file_byte_content) @@ -59,13 +65,15 @@ def test_uploadfile(app, setup, 
tmpdir, monkeypatch): file_content = 'aaaa' monkeypatch.chdir(tmpdir) import passerelle.apps.cmis.models + monkeypatch.setattr(passerelle.apps.cmis.models, 'CMISGateway', FakeCMISGateway) response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": file_name, - "content": b64encode(file_content), - "content_type": "image/jpeg"}}) + params={ + "path": "/some/folder/structure", + "file": {"filename": file_name, "content": b64encode(file_content), "content_type": "image/jpeg"}, + }, + ) result_file = py.path.local(file_name) assert result_file.exists() with result_file.open('rb'): @@ -77,11 +85,12 @@ def test_uploadfile(app, setup, tmpdir, monkeypatch): file_name_overwrite = "testfile.whatever.overwrite" response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": file_name, - "content": b64encode(file_content), - "content_type": "image/jpeg"}, - "filename": file_name_overwrite}) + params={ + "path": "/some/folder/structure", + "file": {"filename": file_name, "content": b64encode(file_content), "content_type": "image/jpeg"}, + "filename": file_name_overwrite, + }, + ) result_file = py.path.local(file_name_overwrite) assert result_file.exists() with result_file.open('rb'): @@ -92,25 +101,26 @@ def test_uploadfile(app, setup, tmpdir, monkeypatch): def test_upload_file_metadata(app, setup, monkeypatch): - class FakeFolder: def createDocument(self, filename, contentFile, properties, contentType=None): return Mock(properties=properties) from passerelle.apps.cmis.models import CMISGateway + monkeypatch.setattr(CMISGateway, '_get_or_create_folder', lambda x, y: FakeFolder()) response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": "bla", - "content": b64encode('bla')}, - "object_type": "D:dui:type", - "properties": { - "cmis:description": "Coucou", - "dui:tnumDossier": "42", - }, - 
"properties/dui:ttypeStructure": "Accueil de loisirs", - }) + params={ + "path": "/some/folder/structure", + "file": {"filename": "bla", "content": b64encode('bla')}, + "object_type": "D:dui:type", + "properties": { + "cmis:description": "Coucou", + "dui:tnumDossier": "42", + }, + "properties/dui:ttypeStructure": "Accueil de loisirs", + }, + ) assert response.json['data']['properties'] == { "cmis:objectTypeId": "D:dui:type", "cmis:description": "Coucou", @@ -122,10 +132,12 @@ def test_upload_file_metadata(app, setup, monkeypatch): def test_uploadfile_error_if_no_file_name(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"content": b64encode('aaaa'), - "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "path": "/some/folder/structure", + "file": {"content": b64encode('aaaa'), "content_type": "image/jpeg"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert response.json['err_desc'].startswith('"filename" or "file[\'filename\']" is required') @@ -134,21 +146,25 @@ def test_uploadfile_error_if_no_file_name(app, setup): def test_uploadfile_error_if_non_string_file_name(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": 1, "content": b64encode('aaaa'), - "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "path": "/some/folder/structure", + "file": {"filename": 1, "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert response.json['err_desc'] == "file/filename: 1 is not of type 'string'" response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"content": b64encode('aaaa'), - "content_type": "image/jpeg"}, - "filename": 1}, - expect_errors=True) + params={ + "path": 
"/some/folder/structure", + "file": {"content": b64encode('aaaa'), "content_type": "image/jpeg"}, + "filename": 1, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert response.json['err_desc'] == "filename: 1 is not of type 'string'" @@ -157,21 +173,25 @@ def test_uploadfile_error_if_non_string_file_name(app, setup): def test_uploadfile_error_if_non_valid_file_name(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": ",.,", "content": b64encode('aaaa'), - "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "path": "/some/folder/structure", + "file": {"filename": ",.,", "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert "',.,' does not match " in response.json['err_desc'] response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"content": b64encode('aaaa'), - "content_type": "image/jpeg"}, - "filename": ",.,"}, - expect_errors=True) + params={ + "path": "/some/folder/structure", + "file": {"content": b64encode('aaaa'), "content_type": "image/jpeg"}, + "filename": ",.,", + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert "',.,' does not match " in response.json['err_desc'] @@ -180,9 +200,11 @@ def test_uploadfile_error_if_non_valid_file_name(app, setup): def test_uploadfile_error_if_no_path(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), - "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), "content_type": "image/jpeg"} + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 
assert response.json['err_desc'] == "'path' is a required property" @@ -191,10 +213,12 @@ def test_uploadfile_error_if_no_path(app, setup): def test_uploadfile_error_if_non_string_path(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": 1, - "file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), - "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "path": 1, + "file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert response.json['err_desc'] == "path: 1 is not of type 'string'" @@ -203,10 +227,12 @@ def test_uploadfile_error_if_non_string_path(app, setup): def test_uploadfile_error_if_no_regular_path(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "no/leading/slash", - "file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), - "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "path": "no/leading/slash", + "file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert "'no/leading/slash' does not match " in response.json['err_desc'] @@ -215,9 +241,12 @@ def test_uploadfile_error_if_no_regular_path(app, setup): def test_uploadfile_error_if_no_file_content(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": 'somefile.txt', "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "path": "/some/folder/structure", + "file": {"filename": 'somefile.txt', "content_type": "image/jpeg"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert response.json['err_desc'] == "file: 'content' is a required property" @@ -226,9 
+255,12 @@ def test_uploadfile_error_if_no_file_content(app, setup): def test_uploadfile_error_if_non_string_file_content(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": 'somefile.txt', "content": 1, "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "path": "/some/folder/structure", + "file": {"filename": 'somefile.txt', "content": 1, "content_type": "image/jpeg"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert response.json['err_desc'] == "file/content: 1 is not of type 'string'" @@ -237,9 +269,12 @@ def test_uploadfile_error_if_non_string_file_content(app, setup): def test_uploadfile_error_if_no_proper_base64_encoding(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": 'somefile.txt', "content": "1", "content_type": "image/jpeg"}}, - expect_errors=True) + params={ + "path": "/some/folder/structure", + "file": {"filename": 'somefile.txt', "content": "1", "content_type": "image/jpeg"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert response.json['err_desc'].startswith('"file[\'content\']" must be a valid base64 string') @@ -249,11 +284,13 @@ def test_upload_file_error_metadata(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": "bla", - "content": b64encode('bla')}, - "properties": {"dui:tnumDossier": "42"}}, - expect_errors=True) + params={ + "path": "/some/folder/structure", + "file": {"filename": "bla", "content": b64encode('bla')}, + "properties": {"dui:tnumDossier": "42"}, + }, + expect_errors=True, + ) assert response.status_code == 400 assert response.json['err'] == 1 assert 'object_type' in response.json['err_desc'] @@ -261,16 +298,20 @@ def 
test_upload_file_error_metadata(app, setup): def test_uploadfile_cmis_gateway_error(app, setup, monkeypatch): from passerelle.utils.jsonresponse import APIError + cmis_gateway = Mock() cmis_gateway.create_doc.side_effect = APIError("some error") cmis_gateway_cls = Mock(return_value=cmis_gateway) import passerelle.apps.cmis.models + monkeypatch.setattr(passerelle.apps.cmis.models, 'CMISGateway', cmis_gateway_cls) response = app.post_json( '/cmis/slug-cmis/uploadfile', - params={"path": "/some/folder/structure", - "file": {"filename": "file_name", "content": b64encode('aaaa'), - "content_type": "image/jpeg"}}) + params={ + "path": "/some/folder/structure", + "file": {"filename": "file_name", "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + }, + ) assert response.json['err'] == 1 assert response.json['err_desc'].startswith("some error") @@ -278,9 +319,9 @@ def test_uploadfile_cmis_gateway_error(app, setup, monkeypatch): def test_get_or_create_folder_already_existing(monkeypatch): default_repository = Mock() default_repository.getObjectByPath.return_value = 'folder' - cmis_client_cls = Mock( - return_value=Mock(spec=CmisClient, defaultRepository=default_repository)) + cmis_client_cls = Mock(return_value=Mock(spec=CmisClient, defaultRepository=default_repository)) import passerelle.apps.cmis.models + monkeypatch.setattr(passerelle.apps.cmis.models, 'CmisClient', cmis_client_cls) gateway = passerelle.apps.cmis.models.CMISGateway('cmis_endpoint', 'user', 'pass', Mock()) assert gateway._get_or_create_folder('/whatever') == 'folder' @@ -291,10 +332,11 @@ def test_get_or_create_folder_one_level_creation(monkeypatch): root_folder = Mock() root_folder.createFolder.return_value = 'folder' default_repository = Mock( - rootFolder=root_folder, **{'getObjectByPath.side_effect': ObjectNotFoundException()}) - cmis_client_cls = Mock( - return_value=Mock(spec=CmisClient, defaultRepository=default_repository)) + rootFolder=root_folder, **{'getObjectByPath.side_effect': 
ObjectNotFoundException()} + ) + cmis_client_cls = Mock(return_value=Mock(spec=CmisClient, defaultRepository=default_repository)) import passerelle.apps.cmis.models + monkeypatch.setattr(passerelle.apps.cmis.models, 'CmisClient', cmis_client_cls) gateway = passerelle.apps.cmis.models.CMISGateway('cmis-url', 'user', 'password', Mock()) assert gateway._get_or_create_folder('/whatever') == 'folder' @@ -309,14 +351,15 @@ def test_get_or_create_folder_two_level_creation(monkeypatch): root_folder.createFolder.return_value = whatever_folder default_repository = Mock(rootFolder=root_folder) default_repository.getObjectByPath.side_effect = ObjectNotFoundException() - cmis_client_cls = Mock( - return_value=Mock(spec=CmisClient, defaultRepository=default_repository)) + cmis_client_cls = Mock(return_value=Mock(spec=CmisClient, defaultRepository=default_repository)) import passerelle.apps.cmis.models + monkeypatch.setattr(passerelle.apps.cmis.models, 'CmisClient', cmis_client_cls) gateway = passerelle.apps.cmis.models.CMISGateway('cmis_url', 'user', 'password', Mock()) assert gateway._get_or_create_folder('/whatever/man') == 'folder' default_repository.getObjectByPath.assert_has_calls( - [call('/whatever/man'), call('/whatever'), call('/whatever/man')]) + [call('/whatever/man'), call('/whatever'), call('/whatever/man')] + ) root_folder.createFolder.assert_called_once_with('whatever') whatever_folder.createFolder.assert_called_once_with('man') @@ -336,9 +379,9 @@ def test_get_or_create_folder_with_some_existing_and_some_not(monkeypatch): root_folder = Mock() default_repository = Mock(rootFolder=root_folder) default_repository.getObjectByPath.side_effect = getObjectByPath - cmis_client_cls = Mock( - return_value=Mock(spec=CmisClient, defaultRepository=default_repository)) + cmis_client_cls = Mock(return_value=Mock(spec=CmisClient, defaultRepository=default_repository)) import passerelle.apps.cmis.models + monkeypatch.setattr(passerelle.apps.cmis.models, 'CmisClient', 
cmis_client_cls) gateway = passerelle.apps.cmis.models.CMISGateway('cmis_url', 'user', 'password', Mock()) assert gateway._get_or_create_folder('/whatever/man') == 'folder' @@ -348,6 +391,7 @@ def test_get_or_create_folder_with_some_existing_and_some_not(monkeypatch): def test_create_doc(): from passerelle.apps.cmis.models import CMISGateway + gateway = CMISGateway('cmis_url', 'user', 'password', Mock()) folder = Mock() folder.createDocument.return_value = 'doc' @@ -360,15 +404,18 @@ def test_create_doc(): assert content_file.read() == b'file_content' -@pytest.mark.parametrize("cmis_exc,err_msg", [ - (httplib2.HttpLib2Error, "connection error"), - # FIXME used for cmslib 0.5 compat - (urllib2.URLError, "connection error"), - (PermissionDeniedException, "permission denied"), - (UpdateConflictException, "update conflict"), - (InvalidArgumentException, "invalid property"), - (CmisException, "cmis binding error") -]) +@pytest.mark.parametrize( + "cmis_exc,err_msg", + [ + (httplib2.HttpLib2Error, "connection error"), + # FIXME used for cmslib 0.5 compat + (urllib2.URLError, "connection error"), + (PermissionDeniedException, "permission denied"), + (UpdateConflictException, "update conflict"), + (InvalidArgumentException, "invalid property"), + (CmisException, "cmis binding error"), + ], +) def test_wrap_cmis_error(app, setup, monkeypatch, cmis_exc, err_msg): from passerelle.utils.jsonresponse import APIError from passerelle.apps.cmis.models import wrap_cmis_error @@ -384,6 +431,7 @@ def test_wrap_cmis_error(app, setup, monkeypatch, cmis_exc, err_msg): def test_re_file_path(): from passerelle.apps.cmis.models import FILE_PATH_PATTERN + RE_FILE_PATH = re.compile(FILE_PATH_PATTERN) assert RE_FILE_PATH.match('/') assert RE_FILE_PATH.match('/some') @@ -400,13 +448,13 @@ def test_re_file_path(): def test_re_file_name(): from passerelle.apps.cmis.models import FILE_NAME_PATTERN + RE_FILE_NAME = re.compile(FILE_NAME_PATTERN) assert RE_FILE_NAME.match('toto.tata') assert 
RE_FILE_NAME.match('TOTO.TATA') def test_cmis_types_view(setup, app, admin_user, monkeypatch): - class FakeCmisType: class FakeCmisProperty: def __init__(self, id): @@ -449,6 +497,7 @@ def test_cmis_types_view(setup, app, admin_user, monkeypatch): repo = FakeCmisRepo(root_types) from cmislib import CmisClient + monkeypatch.setattr(CmisClient, 'getDefaultRepository', lambda self: repo) app = login(app) resp = app.get('/cmis/slug-cmis/') @@ -482,15 +531,20 @@ def test_raw_uploadfile(mocked_request, app, setup): file_name = "test2" file_content = 'salut\n' path = "/test-eo" - url = reverse('generic-endpoint', kwargs={'connector': 'cmis', - 'endpoint': 'uploadfile', 'slug': setup.slug}) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'cmis', 'endpoint': 'uploadfile', 'slug': setup.slug} + ) def cmis_mocked_request(uri, method="GET", body=None, **kwargs): """simulate the 3 (ordered) HTTP queries involved""" response = {'status': '200'} if method == 'GET' and uri == 'http://example.com/cmisatom': content = open('%s/tests/data/cmis/cmis1.out.xml' % os.getcwd(), 'rb').read() - elif method == 'GET' and uri == 'http://example.com/cmisatom/test/path?path=/test-eo&filter=&includeAllowableActions=false&includeACL=false&includePolicyIds=false&includeRelationships=&renditionFilter=': + elif ( + method == 'GET' + and uri + == 'http://example.com/cmisatom/test/path?path=/test-eo&filter=&includeAllowableActions=false&includeACL=false&includePolicyIds=false&includeRelationships=&renditionFilter=' + ): content = open('%s/tests/data/cmis/cmis2.out.xml' % os.getcwd(), 'rb').read() elif method == 'POST' and uri == 'http://example.com/cmisatom/test/children?id=L3Rlc3QtZW8%3D': expected_input = open('%s/tests/data/cmis/cmis3.in.xml' % os.getcwd(), 'r').read() @@ -520,9 +574,8 @@ def test_raw_uploadfile(mocked_request, app, setup): mocked_request.side_effect = cmis_mocked_request params = { "path": path, - "file": {"filename": file_name, - "content": b64encode(file_content), - 
"content_type": "image/jpeg"}} + "file": {"filename": file_name, "content": b64encode(file_content), "content_type": "image/jpeg"}, + } response = app.post_json(url, params=params) json_result = response.json assert json_result['err'] == 0 diff --git a/tests/test_cron.py b/tests/test_cron.py index df135e9c..53989677 100644 --- a/tests/test_cron.py +++ b/tests/test_cron.py @@ -6,6 +6,7 @@ from django.core.management.base import CommandError from passerelle.apps.base_adresse.models import BaseAdresse + def test_cron_frequencies(db): for frequency in ('hourly', 'daily', 'weekly', 'monthly'): call_command('cron', frequency) @@ -16,8 +17,9 @@ def test_cron_frequencies(db): def test_cron_error(db, caplog): connector = BaseAdresse.objects.create(slug='base-adresse') excep = Exception('hello') - with mock.patch('passerelle.apps.base_adresse.models.BaseResource.hourly', - new=mock.Mock(side_effect=excep)): + with mock.patch( + 'passerelle.apps.base_adresse.models.BaseResource.hourly', new=mock.Mock(side_effect=excep) + ): with pytest.raises(CommandError): call_command('cron', 'hourly') assert caplog.records[0].message == 'connector "base-adresse.base-adresse" error running hourly job' diff --git a/tests/test_cryptor.py b/tests/test_cryptor.py index f5f742ab..aa1ef162 100644 --- a/tests/test_cryptor.py +++ b/tests/test_cryptor.py @@ -63,9 +63,7 @@ z5aZ2AnUBc/xueO2ixL3ROOXYAeakrRAQ38G13ibYe2dQpv6/CTsZJOttnCErn54 @pytest.fixture def cryptor(db): - return Cryptor.objects.create(slug='test', - private_key=PRIVATE_KEY, - public_key=PUBLIC_KEY) + return Cryptor.objects.create(slug='test', private_key=PRIVATE_KEY, public_key=PUBLIC_KEY) def test_cryptor_bad_keys(db): @@ -99,19 +97,22 @@ def test_cryptor_bad_requests(app, cryptor): # full opened access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(cryptor) - AccessRight.objects.create(codename='can_encrypt', apiuser=api, resource_type=obj_type, - resource_pk=cryptor.pk) 
- AccessRight.objects.create(codename='can_decrypt', apiuser=api, resource_type=obj_type, - resource_pk=cryptor.pk) + AccessRight.objects.create( + codename='can_encrypt', apiuser=api, resource_type=obj_type, resource_pk=cryptor.pk + ) + AccessRight.objects.create( + codename='can_decrypt', apiuser=api, resource_type=obj_type, resource_pk=cryptor.pk + ) endpoint = utils.generic_endpoint_url('cryptor', 'file-encrypt', slug=cryptor.slug) - for bad_payload in ('error', - {"foo": "bar"}, - ["not", "a", "dict"], - {"file": {"filename": "f", "content_type": "ct"}}, - {"file": {"filename": "f", "content_type": "ct", "content": None}}, - {"file": {"filename": "f", "content_type": "ct", "content": "NotBase64"}}, - ): + for bad_payload in ( + 'error', + {"foo": "bar"}, + ["not", "a", "dict"], + {"file": {"filename": "f", "content_type": "ct"}}, + {"file": {"filename": "f", "content_type": "ct", "content": None}}, + {"file": {"filename": "f", "content_type": "ct", "content": "NotBase64"}}, + ): resp = app.post_json(endpoint, params=bad_payload, status=400) assert resp.json['err'] == 1 @@ -123,10 +124,12 @@ def test_cryptor_bad_requests(app, cryptor): def test_cryptor_encrypt_decrypt(app, cryptor): api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(cryptor) - AccessRight.objects.create(codename='can_encrypt', apiuser=api, resource_type=obj_type, - resource_pk=cryptor.pk) - AccessRight.objects.create(codename='can_decrypt', apiuser=api, resource_type=obj_type, - resource_pk=cryptor.pk) + AccessRight.objects.create( + codename='can_encrypt', apiuser=api, resource_type=obj_type, resource_pk=cryptor.pk + ) + AccessRight.objects.create( + codename='can_decrypt', apiuser=api, resource_type=obj_type, resource_pk=cryptor.pk + ) # encrypt endpoint = utils.generic_endpoint_url('cryptor', 'file-encrypt', slug=cryptor.slug) @@ -138,7 +141,8 @@ def test_cryptor_encrypt_decrypt(app, cryptor): assert CryptedFile.objects.count() == 1 
cfile = CryptedFile.objects.first() assert resp.json['data']['redirect_url'].endswith( - '/cryptor/%s/file-decrypt/%s' % (cryptor.slug, cfile.uuid)) + '/cryptor/%s/file-decrypt/%s' % (cryptor.slug, cfile.uuid) + ) cfile.delete() # encrypt with another redirect url diff --git a/tests/test_csv_datasource.py b/tests/test_csv_datasource.py index 9f0dc521..4132d269 100644 --- a/tests/test_csv_datasource.py +++ b/tests/test_csv_datasource.py @@ -79,12 +79,14 @@ def get_file_content(filename): @pytest.fixture def setup(): - def maker(columns_keynames='fam,id,lname,fname,sex', filename='data.csv', - sheet_name='Feuille2', data='', skip_header=False): - api = ApiUser.objects.create( - username='all', - keytype='', - key='') + def maker( + columns_keynames='fam,id,lname,fname,sex', + filename='data.csv', + sheet_name='Feuille2', + data='', + skip_header=False, + ): + api = ApiUser.objects.create(username='all', keytype='', key='') csv = CsvDataSource.objects.create( csv_file=File(data, filename), sheet_name=sheet_name, @@ -92,19 +94,21 @@ def setup(): slug='test', title='a title', description='a description', - skip_header=skip_header) + skip_header=skip_header, + ) assert TableRow.objects.filter(resource=csv).count() == len(csv.get_rows()) obj_type = ContentType.objects.get_for_model(csv) AccessRight.objects.create( - codename='can_access', - apiuser=api, - resource_type=obj_type, - resource_pk=csv.pk) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csv.slug, - 'endpoint': 'data', - }) + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=csv.pk + ) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csv.slug, + 'endpoint': 'data', + }, + ) return csv, url return maker @@ -123,6 +127,7 @@ def client(): def filetype(request): return request.param + @pytest.fixture def sheet_name(filetype): return 'Feuille2' if filetype != 'data.csv' else '' @@ -135,11 +140,13 @@ def 
file_content(filetype): def test_default_column_keynames(setup, filetype): - csvdata = CsvDataSource.objects.create(csv_file=File(get_file_content(filetype), filetype), - sheet_name='Feuille2', - slug='test2', - title='a title', - description='a description') + csvdata = CsvDataSource.objects.create( + csv_file=File(get_file_content(filetype), filetype), + sheet_name='Feuille2', + slug='test2', + title='a title', + description='a description', + ) assert len(csvdata.columns_keynames.split(',')) == 2 assert 'id' in csvdata.columns_keynames assert 'text' in csvdata.columns_keynames @@ -171,9 +178,8 @@ def test_unfiltered_data(client, setup, filetype): def test_empty_file(client, setup): csvdata, url = setup( - 'field,,another_field,', - filename='data-empty.ods', - data=get_file_content('data-empty.ods')) + 'field,,another_field,', filename='data-empty.ods', data=get_file_content('data-empty.ods') + ) resp = client.get(url) result = parse_response(resp) assert len(result) == 0 @@ -236,8 +242,7 @@ def test_data(client, setup, filetype): filters = {'text': 'Sacha'} resp = client.get(url, filters) result = parse_response(resp) - assert result[0] == {'id': '59', 'text': 'Sacha', - 'fam': '2431', 'sexe': 'H'} + assert result[0] == {'id': '59', 'text': 'Sacha', 'fam': '2431', 'sexe': 'H'} def test_unicode_filter_data(client, setup, filetype): @@ -279,8 +284,7 @@ def test_multi_filter(client, setup, filetype): filters = {'sexe': 'F'} resp = client.get(url, filters) result = parse_response(resp) - assert result[0] == {'id': '6', 'text': 'Shanone', - 'fam': '525', 'sexe': 'F'} + assert result[0] == {'id': '6', 'text': 'Shanone', 'fam': '525', 'sexe': 'F'} assert len(result) == 10 @@ -324,7 +328,7 @@ def test_dialect(client, setup): 'quoting': 0, 'delimiter': ';', 'quotechar': '"', - 'doublequote': False + 'doublequote': False, } assert expected == csvdata.dialect_options @@ -380,11 +384,14 @@ def test_cache_new_shorter_file(client, setup): def test_query_array(app, setup, 
filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='array') query.projections = '\n'.join(['id:int(id)', 'prenom:prenom']) query.save() @@ -398,13 +405,15 @@ def test_query_array(app, setup, filetype): def test_query_q_filter(app, setup, filetype): - csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, - data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='array') query.projections = '\n'.join(['id:int(id)', 'text:prenom']) query.save() @@ -424,11 +433,14 @@ def test_query_q_filter(app, setup, filetype): def test_query_dict(app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='dict') query.projections = '\n'.join(['id:int(id)', 'prenom:prenom']) query.save() @@ -443,11 +455,14 @@ def test_query_dict(app, 
setup, filetype): def test_query_tuples(app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='tuples') query.projections = '\n'.join(['id:int(id)', 'prenom:prenom']) query.save() @@ -464,11 +479,14 @@ def test_query_tuples(app, setup, filetype): def test_query_onerow(app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='onerow') query.projections = '\n'.join(['id:int(id)', 'prenom:prenom']) query.filters = 'int(id) == 525' @@ -481,11 +499,14 @@ def test_query_onerow(app, setup, filetype): def test_query_one(app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='one') query.projections = 'wtf:prenom' query.filters = 'int(id) == 525' @@ -497,11 +518,14 @@ def test_query_one(app, setup, filetype): def test_query_filter_param(app, 
setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='one') query.projections = 'wtf:prenom' query.filters = 'int(id) == int(query.get("foobar"))' @@ -519,11 +543,14 @@ def test_query_filter_param(app, setup, filetype): def test_query_distinct(app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, distinct='sexe') query.save() response = app.get(url) @@ -534,11 +561,14 @@ def test_query_distinct(app, setup, filetype): def test_query_keyed_distinct(app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, distinct='nom', structure='keyed-distinct') query.save() response = app.get(url) @@ -549,43 +579,50 @@ def test_query_keyed_distinct(app, setup, filetype): def test_query_order(app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', 
filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, order='prenom.lower()') query.save() response = app.get(url) assert response.json['err'] == 0 assert isinstance(response.json['data'], list) - assert response.json['data'] == sorted(response.json['data'], key=lambda row: - row['prenom'].lower()) + assert response.json['data'] == sorted(response.json['data'], key=lambda row: row['prenom'].lower()) def test_query_order_missing_column(app, setup): csvdata, url = setup(data=StringIO(data + '42;42;STOPHERE\n')) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, order='fname') query.save() response = app.get(url) assert response.json['err'] == 0 assert isinstance(response.json['data'], list) - assert response.json['data'] == sorted(response.json['data'], key=lambda row: - row['fname']) + assert response.json['data'] == sorted(response.json['data'], key=lambda row: row['fname']) def test_query_error(app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) response = app.get(url) assert response.json['err'] == 1 
@@ -723,17 +760,22 @@ def test_download_file(app, setup, filetype, admin_user): elif filetype == 'data.xls': assert resp.headers['Content-Type'] == 'application/vnd.ms-excel' elif filetype == 'data.xlsx': - assert resp.headers['Content-Type'] == 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + assert ( + resp.headers['Content-Type'] + == 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + ) def test_query_filter_multiline(app, setup, filetype): - csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, - data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata) query.filters = '\n'.join(['int(id) <= 525', 'int(id) >= 511']) query.save() @@ -741,14 +783,17 @@ def test_query_filter_multiline(app, setup, filetype): assert response.json['err'] == 0 assert len(response.json['data']) == 2 + def test_query_builtin_id_filter(app, setup, filetype): - csvdata, _url = setup('id,whatever,nom,prenom,sexe', filename=filetype, - data=get_file_content(filetype)) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + csvdata, _url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata) query.save() @@ -765,6 +810,7 @@ def test_query_builtin_id_filter(app, setup, filetype): assert app.get(url + 
'?id=121').json['data'][0]['prenom'] == 'Eliot' assert len(app.get(url + '?id=525').json['data']) == 0 + def test_delete_connector_query(admin_user, app, setup, filetype): csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) url = reverse('view-connector', kwargs={'connector': 'csvdatasource', 'slug': csvdata.slug}) @@ -823,27 +869,32 @@ def test_change_csv_command(setup): assert list(csv.get_rows()) != [] -@pytest.mark.parametrize('payload,expected', [ - ({}, 20), - ({'limit': 10}, 10), - ({'limit': 10, 'offset': 0}, 10), - ({'limit': 10, 'offset': 15}, 5), - ({'limit': 10, 'offset': 42}, 0), -]) +@pytest.mark.parametrize( + 'payload,expected', + [ + ({}, 20), + ({'limit': 10}, 10), + ({'limit': 10, 'offset': 0}, 10), + ({'limit': 10, 'offset': 15}, 5), + ({'limit': 10, 'offset': 42}, 0), + ], +) def test_pagination(app, setup, filetype, payload, expected): - csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, - data=get_file_content(filetype)) + csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) # data endpoint response = app.get(url + '?' 
+ urlencode(payload)) assert len(response.json['data']) == expected # query endpoint - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='array') query.projections = '\n'.join(['id:int(id)', 'text:prenom']) query.save() @@ -851,16 +902,18 @@ def test_pagination(app, setup, filetype, payload, expected): assert len(response.json['data']) == expected -@pytest.mark.parametrize('payload,expected_error', [ - ({'limit': 'bla'}, 'invalid limit parameter'), - ({'limit': 0}, 'invalid limit parameter'), - ({'limit': -1}, 'invalid limit parameter'), - ({'limit': 10, 'offset': 'bla'}, 'invalid offset parameter'), - ({'limit': 10, 'offset': -1}, 'invalid offset parameter'), -]) +@pytest.mark.parametrize( + 'payload,expected_error', + [ + ({'limit': 'bla'}, 'invalid limit parameter'), + ({'limit': 0}, 'invalid limit parameter'), + ({'limit': -1}, 'invalid limit parameter'), + ({'limit': 10, 'offset': 'bla'}, 'invalid offset parameter'), + ({'limit': 10, 'offset': -1}, 'invalid offset parameter'), + ], +) def test_pagination_error(app, setup, filetype, payload, expected_error): - csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, - data=get_file_content(filetype)) + csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) # data endpoint response = app.get(url + '?' 
+ urlencode(payload)) @@ -868,11 +921,14 @@ def test_pagination_error(app, setup, filetype, payload, expected_error): assert response.json['err_desc'] == expected_error # query endpoint - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csvdata.slug, - 'endpoint': 'query/query-1_/', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csvdata.slug, + 'endpoint': 'query/query-1_/', + }, + ) query = Query(slug='query-1_', resource=csvdata, structure='array') query.projections = '\n'.join(['id:int(id)', 'text:prenom']) query.save() @@ -880,15 +936,17 @@ def test_pagination_error(app, setup, filetype, payload, expected_error): assert response.json['err'] == 1 assert response.json['err_desc'] == expected_error + def test_csv_dst(app, setup, admin_user): csvdata, url = setup( - 'field,,another_field,', - filename='data-empty.ods', - data=get_file_content('data-empty.ods')) + 'field,,another_field,', filename='data-empty.ods', data=get_file_content('data-empty.ods') + ) with mock.patch('os.fstat') as mocked_fstat, override_settings(TIME_ZONE='Europe/Paris'): + class MockStatResult: st_ctime = time.mktime((2019, 10, 27, 2, 20, 50, 0, 0, 0)) + mocked_fstat.return_value = MockStatResult() # visit endpoint @@ -929,11 +987,13 @@ def test_view_manage_create(app, admin_user, filetype, file_content, sheet_name) def test_csv_daily_clean(settings, remove_files): settings.CSVDATASOURCE_REMOVE_ON_CLEAN = remove_files - csvdata = CsvDataSource.objects.create(csv_file=File(StringIO('a;z;e;r;t;y'), 'data.csv'), - sheet_name='Feuille2', - slug='test-%s' % str(uuid.uuid4()), - title='a title', - description='a description') + csvdata = CsvDataSource.objects.create( + csv_file=File(StringIO('a;z;e;r;t;y'), 'data.csv'), + sheet_name='Feuille2', + slug='test-%s' % str(uuid.uuid4()), + title='a title', + description='a description', + ) old_dir = os.path.join(os.path.join(settings.MEDIA_ROOT), 'csv') 
os.makedirs(old_dir) csvdata_dir = os.path.dirname(csvdata.csv_file.path) diff --git a/tests/test_dpark.py b/tests/test_dpark.py index 58d2150e..b4044f2c 100644 --- a/tests/test_dpark.py +++ b/tests/test_dpark.py @@ -47,20 +47,22 @@ def make_response(name, items): return ET.tostring(root) - @pytest.fixture def dpark(db): - resource = make_resource(DPark, **{ - 'slug': SLUG, - 'wsdl_url': WSDL_URL, - 'operation_url': OPERATIONAL_URL, - }) + resource = make_resource( + DPark, + **{ + 'slug': SLUG, + 'wsdl_url': WSDL_URL, + 'operation_url': OPERATIONAL_URL, + }, + ) resource.mock_requests = [] resource.mock_responses = [] @httmock.urlmatch(scheme='https', netloc='example.net', path='/dpark', query='wsdl', method='GET') def wsdl(url, request): - return WSDL_CONTENT + return WSDL_CONTENT @httmock.urlmatch(scheme='https', netloc='example.net', path='/dpark', method='POST') def request(url, request): @@ -83,7 +85,6 @@ class WebFaultHavingLatin1(WebFault): class MockedService(object): - def __init__(self, success, error_class, replydata): self.success = success self.error_class = error_class @@ -156,7 +157,9 @@ def test_search(dpark, app): assert resp.json['err'] == 1 assert resp.json['code'] == 'unknown-file' # invalid cardnumber - client.return_value = get_client(replydata={'CodeRetour': '04', 'MessageRetour': u'Numéro de support inconnu'}) + client.return_value = get_client( + replydata={'CodeRetour': '04', 'MessageRetour': u'Numéro de support inconnu'} + ) resp = app.get('/dpark/test/search/', params=params) assert resp.json['err'] == 1 assert resp.json['code'] == 'support-number-unknown' @@ -182,7 +185,9 @@ def test_link(dpark, app): assert resp.json['err'] == 1 assert resp.json['code'] == 'unknown-file' # invalid cardnumber - client.return_value = get_client(replydata={'CodeRetour': '04', 'MessageRetour': u'Numéro de support inconnu'}) + client.return_value = get_client( + replydata={'CodeRetour': '04', 'MessageRetour': u'Numéro de support inconnu'} + ) resp = 
app.post_json('/dpark/test/link/', params=params) assert resp.json['err'] == 1 assert resp.json['code'] == 'support-number-unknown' @@ -210,28 +215,31 @@ def test_unlink(dpark, app): assert resp.json['err_desc'] == 'No pairing exists' # pairing exist Pairing.objects.create( - resource=dpark, nameid=nameid, lastname='bar', - firstnames='foo', filenumber='1' * 9, badgenumber='2' * 9, - cardnumber='3' * 9) + resource=dpark, + nameid=nameid, + lastname='bar', + firstnames='foo', + filenumber='1' * 9, + badgenumber='2' * 9, + cardnumber='3' * 9, + ) resp = app.post_json('/dpark/test/unlink/', params=params) assert resp.json['data'] is True assert Pairing.objects.count() == 0 def test_address_eligibility(dpark, app): - params = { - 'address_sticode': '315553637461', - 'address_zipcode': '44000' - } + params = {'address_sticode': '315553637461', 'address_zipcode': '44000'} # missing required parameter app.get('/dpark/test/address-eligibility/', params=params, status=400) # not eligible dpark.mock_responses.append( - make_response('ELIGADR', - ( - ('CodeRetour', '99'), - ('MessageRetour', 'Erreur extension'), - ) + make_response( + 'ELIGADR', + ( + ('CodeRetour', '99'), + ('MessageRetour', 'Erreur extension'), + ), ) ) params['address_locality'] = 'Nantes' @@ -240,21 +248,22 @@ def test_address_eligibility(dpark, app): assert resp.json['err_desc'] == 'Erreur extension' # eligible dpark.mock_responses.append( - make_response('ELIGADR', - ( - ('CodeRetour', '01'), - ('MessageRetour', 'Elligible'), - ('Adresse_EtageEscalierAppartement', ''), - ('Adresse_ImmeubleBatimentResidence', ''), - ('Adresse_NumeroVoie', '13'), - ('Adresse_Extension', '1'), - ('Adresse_NomVoie', 'Rue des Abeilles'), - ('Adresse_CodeSTI', '315550016038'), - ('Adresse_BoitePostaleLieudit', ''), - ('Adresse_CodePostal', '31000'), - ('Adresse_Localite', 'Toulouse'), - ('Adresse_Quartier', 'Dupuy'), - ) + make_response( + 'ELIGADR', + ( + ('CodeRetour', '01'), + ('MessageRetour', 'Elligible'), + 
('Adresse_EtageEscalierAppartement', ''), + ('Adresse_ImmeubleBatimentResidence', ''), + ('Adresse_NumeroVoie', '13'), + ('Adresse_Extension', '1'), + ('Adresse_NomVoie', 'Rue des Abeilles'), + ('Adresse_CodeSTI', '315550016038'), + ('Adresse_BoitePostaleLieudit', ''), + ('Adresse_CodePostal', '31000'), + ('Adresse_Localite', 'Toulouse'), + ('Adresse_Quartier', 'Dupuy'), + ), ) ) params['address_streetext'] = 1 @@ -270,8 +279,12 @@ def test_subscriber_infos(dpark, app): nameid = 'abcd' * 8 url = '/dpark/test/infos/%s/' % nameid params = { - 'nameid': nameid, 'firstnames': 'spam eggs', 'lastname': 'bar', - 'filenumber': '1' * 9, 'badgenumber': '2' * 9} + 'nameid': nameid, + 'firstnames': 'spam eggs', + 'lastname': 'bar', + 'filenumber': '1' * 9, + 'badgenumber': '2' * 9, + } Pairing.objects.create(resource=dpark, **params) # unknown subscriber resp = app.get('/dpark/test/infos/toto/') @@ -282,7 +295,8 @@ def test_subscriber_infos(dpark, app): assert resp.json['data'] == [] # known file replydata = { - 'CodeRetour': '01', 'MessageRetour': 'Dossier existant', + 'CodeRetour': '01', + 'MessageRetour': 'Dossier existant', "Adresse_BoitePostaleLieuDit": None, "Adresse_CodePostal": "44000", "Adresse_CodeSTI": "315553609651", @@ -308,7 +322,7 @@ def test_subscriber_infos(dpark, app): "Demandeur_NomUsuel": "BAR", "Demandeur_Prenom": "Foo Spam", "Demandeur_TelephoneFixe": "0611111111", - "Demandeur_TelephonePortable": None + "Demandeur_TelephonePortable": None, } client.return_value = get_client(replydata=replydata) resp = app.get(url) @@ -325,10 +339,18 @@ def test_subscriber_infos(dpark, app): assert data[0]['demande_datefinabo'] == '2019-06-24' # mutiple pairing - Pairing.objects.create(resource=dpark, nameid=nameid, firstnames='monty', lastname='eggs', filenumber='5' * 9, - badgenumber='6' * 9, cardnumber='7' * 9) + Pairing.objects.create( + resource=dpark, + nameid=nameid, + firstnames='monty', + lastname='eggs', + filenumber='5' * 9, + badgenumber='6' * 9, + 
cardnumber='7' * 9, + ) replydata2 = { - 'CodeRetour': '01', 'MessageRetour': 'Dossier existant', + 'CodeRetour': '01', + 'MessageRetour': 'Dossier existant', "Adresse_BoitePostaleLieuDit": None, "Adresse_CodePostal": "94000", "Adresse_CodeSTI": "315553609651", @@ -354,12 +376,10 @@ def test_subscriber_infos(dpark, app): "Demandeur_NomUsuel": "EGGS", "Demandeur_Prenom": "Monty", "Demandeur_TelephoneFixe": "0611111111", - "Demandeur_TelephonePortable": None + "Demandeur_TelephonePortable": None, } # there will be only one call as first pairing is now cached - client.side_effect = [ - get_client(replydata=replydata2) - ] + client.side_effect = [get_client(replydata=replydata2)] resp = app.get(url) data = resp.json['data'] assert len(data) == 2 @@ -400,10 +420,10 @@ def test_subscriber_infos(dpark, app): def test_check_renewal_time(dpark, app): with mock.patch('passerelle.contrib.dpark.models.get_client') as client: url = '/dpark/test/check-renewal-time/' - params = { - 'firstnames': 'spam eggs', 'lastname': 'bar', - 'filenumber': '1' * 9, 'badgenumber': '2' * 9} - client.return_value = get_client(replydata={'CodeRetour': '02', 'MessageRetour': u'Renouvellement hors délai'}) + params = {'firstnames': 'spam eggs', 'lastname': 'bar', 'filenumber': '1' * 9, 'badgenumber': '2' * 9} + client.return_value = get_client( + replydata={'CodeRetour': '02', 'MessageRetour': u'Renouvellement hors délai'} + ) resp = app.get(url, params=params) assert resp.json['data'] is False assert resp.json['desc'] == u'Renouvellement hors délai' @@ -415,10 +435,10 @@ def test_check_renewal_time(dpark, app): def test_check_renewal_duplicate(dpark, app): with mock.patch('passerelle.contrib.dpark.models.get_client') as client: url = '/dpark/test/check-renewal-time/' - params = { - 'firstnames': 'spam eggs', 'lastname': 'bar', - 'filenumber': '1' * 9, 'badgenumber': '2' * 9} - client.return_value = get_client(replydata={'CodeRetour': '02', 'MessageRetour': u'Demande déjà en cours'}) + params = 
{'firstnames': 'spam eggs', 'lastname': 'bar', 'filenumber': '1' * 9, 'badgenumber': '2' * 9} + client.return_value = get_client( + replydata={'CodeRetour': '02', 'MessageRetour': u'Demande déjà en cours'} + ) resp = app.get(url, params=params) assert resp.json['data'] is False assert resp.json['desc'] == u'Demande déjà en cours' @@ -432,11 +452,17 @@ def test_check_creation_duplicate(dpark, app): with mock.patch('passerelle.contrib.dpark.models.get_client') as client: url = '/dpark/test/check-creation-duplicate/' params = { - 'address_district': 'PERI', 'address_locality': 'Toulouse', - 'address_sticode': '315553609651', 'address_streetext': '1', - 'address_zipcode': '31000', 'applicant_firstnames': 'Spam', - 'applicant_lastname': 'Ham'} - client.return_value = get_client(replydata={'CodeRetour': '02', 'MessageRetour': u'Demande déjà en cours'}) + 'address_district': 'PERI', + 'address_locality': 'Toulouse', + 'address_sticode': '315553609651', + 'address_streetext': '1', + 'address_zipcode': '31000', + 'applicant_firstnames': 'Spam', + 'applicant_lastname': 'Ham', + } + client.return_value = get_client( + replydata={'CodeRetour': '02', 'MessageRetour': u'Demande déjà en cours'} + ) resp = app.get(url, params=params) assert resp.json['data'] is False assert resp.json['desc'] == u'Demande déjà en cours' @@ -450,11 +476,17 @@ def test_check_creation_not_renewal(dpark, app): with mock.patch('passerelle.contrib.dpark.models.get_client') as client: url = '/dpark/test/check-creation-not-renewal/' params = { - 'address_district': 'PERI', 'address_locality': 'Toulouse', - 'address_sticode': '315553609651', 'address_streetext': '1', - 'address_zipcode': '31000', 'applicant_firstnames': 'Spam', - 'applicant_lastname': 'Ham'} - client.return_value = get_client(replydata={'CodeRetour': '02', 'MessageRetour': u'Usager existe déjà dans D-Park'}) + 'address_district': 'PERI', + 'address_locality': 'Toulouse', + 'address_sticode': '315553609651', + 'address_streetext': '1', + 
'address_zipcode': '31000', + 'applicant_firstnames': 'Spam', + 'applicant_lastname': 'Ham', + } + client.return_value = get_client( + replydata={'CodeRetour': '02', 'MessageRetour': u'Usager existe déjà dans D-Park'} + ) resp = app.get(url, params=params) assert resp.json['data'] is False assert resp.json['desc'] == u'Usager existe déjà dans D-Park' @@ -471,17 +503,24 @@ def test_get_payment_infos(dpark, app): assert resp.json['err'] == 1 assert resp.json['err_desc'] == 'No pairing exists' params = { - 'nameid': nameid, 'firstnames': 'spam eggs', 'lastname': 'bar', - 'filenumber': '1' * 9, 'badgenumber': '2' * 9} + 'nameid': nameid, + 'firstnames': 'spam eggs', + 'lastname': 'bar', + 'filenumber': '1' * 9, + 'badgenumber': '2' * 9, + } Pairing.objects.create(resource=dpark, **params) client.return_value = get_client(replydata={'CodeRetour': '02', 'MessageRetour': u'Dossier inconnu'}) resp = app.get(url) assert resp.json['err'] == 0 assert resp.json['data'] == [] replydata = { - 'CodeRetour': '01', 'MessageRetour': u'Demande acceptée', - 'NumeroDemande': '55555', 'Montant': 12500, 'TypePaiement': 10, - 'NumeroTeledossierPhase1': 'E-8-0AA666BB' + 'CodeRetour': '01', + 'MessageRetour': u'Demande acceptée', + 'NumeroDemande': '55555', + 'Montant': 12500, + 'TypePaiement': 10, + 'NumeroTeledossierPhase1': 'E-8-0AA666BB', } client.return_value = get_client(replydata=replydata) resp = app.get(url) @@ -492,11 +531,14 @@ def test_get_payment_infos(dpark, app): assert data['numerodemande'] == '55555' -@pytest.mark.parametrize('transaction_datetime,expected_date', [ - ('20180611', '20180611'), - # UTC datetime should be converted to Europe/Paris date - ('2018-06-11T23:59:00', '20180612') -]) +@pytest.mark.parametrize( + 'transaction_datetime,expected_date', + [ + ('20180611', '20180611'), + # UTC datetime should be converted to Europe/Paris date + ('2018-06-11T23:59:00', '20180612'), + ], +) def test_payment_notification(dpark, app, transaction_datetime, expected_date): 
operation = mock.Mock(name='PLS_NOTIFCB') service = mock.Mock(spec=['PLS_NOTIFCB'], PLS_NOTIFCB=operation) @@ -506,17 +548,27 @@ def test_payment_notification(dpark, app, transaction_datetime, expected_date): nameid = 'abcd' * 8 filenumber = '1' * 9 params = { - 'nameid': nameid, 'filenumber': filenumber, 'transaction_id': 'I123456789', - 'transaction_datetime': transaction_datetime, 'total_amount': '125', - 'application_id': '61718', 'application_external_id': 'E-8-N5UTAK6P' + 'nameid': nameid, + 'filenumber': filenumber, + 'transaction_id': 'I123456789', + 'transaction_datetime': transaction_datetime, + 'total_amount': '125', + 'application_id': '61718', + 'application_external_id': 'E-8-N5UTAK6P', } url = '/dpark/test/notify-payment/' resp = app.post_json(url, params=params) assert resp.json['err'] == 1 assert resp.json['err_desc'] == 'No pairing exists' - Pairing.objects.create(resource=dpark, **{ - 'nameid': nameid, 'firstnames': 'spam eggs', 'lastname': 'bar', - 'filenumber': filenumber, 'badgenumber': '2' * 9} + Pairing.objects.create( + resource=dpark, + **{ + 'nameid': nameid, + 'firstnames': 'spam eggs', + 'lastname': 'bar', + 'filenumber': filenumber, + 'badgenumber': '2' * 9, + }, ) operation.return_value = mock.Mock(CodeRetour='02', MessageRetour=u'Dossier inconnu') resp = app.post_json(url, params=params) @@ -553,13 +605,14 @@ def test_registration(dpark, app, application_thirdparty_subscription): "application_id": "12-4", "application_payment_type": "10", "application_thirdparty_subscription": application_thirdparty_subscription, - "application_type": 1 + "application_type": 1, } # with missing parameter app.post_json(url, params=params, status=400) params['address_district'] = "PERI" # with an imcplete application - dpark.mock_responses.append(""" + dpark.mock_responses.append( + """ @@ -570,16 +623,19 @@ def test_registration(dpark, app, application_thirdparty_subscription): -""") +""" + ) resp = app.post_json(url, params=params) - 
demande_abonnementtiers = \ - '%s' % \ - repr(application_thirdparty_subscription).lower() + demande_abonnementtiers = ( + '%s' + % repr(application_thirdparty_subscription).lower() + ) assert demande_abonnementtiers in force_text(dpark.mock_requests[0].body) assert resp.json['err'] == 1 assert resp.json['err_desc'] == u'Dossier incomplet' # with complete application - dpark.mock_responses.append(""" + dpark.mock_responses.append( + """ @@ -592,7 +648,8 @@ def test_registration(dpark, app, application_thirdparty_subscription): -""") +""" + ) resp = app.post_json(url, params=params) assert demande_abonnementtiers in force_text(dpark.mock_requests[1].body) assert resp.json['data']['numerodossier'] == 22334 @@ -616,24 +673,29 @@ def test_send_files(dpark, app, settings, freezer): assert resp.json['err_desc'] == u'Numéro de demande introuvable' params['justif_domicile'] = { - 'filename': 'address_proof.pDf', 'content_type': 'application/pdf', - 'content': force_text(base64.b64encode(b'%PDF this is my proof of address'))} + 'filename': 'address_proof.pDf', + 'content_type': 'application/pdf', + 'content': force_text(base64.b64encode(b'%PDF this is my proof of address')), + } params['cartegrise,1'] = { - 'filename': 'cartegrise.pdf', 'content_type': 'application/pdf', - 'content': force_text(base64.b64encode(b'%PDF carte grise 1')) + 'filename': 'cartegrise.pdf', + 'content_type': 'application/pdf', + 'content': force_text(base64.b64encode(b'%PDF carte grise 1')), } params['toto,6'] = { 'filename': 'cartegrisetoto.jpg', 'content_type': 'application/pdf', - 'content': force_text(base64.b64encode(JPEG_CONTENT)) + 'content': force_text(base64.b64encode(JPEG_CONTENT)), } params['cartegrise,6'] = { - 'filename': 'cartegrise2.pdf', 'content_type': 'application/pdf', - 'content': force_text(base64.b64encode(b'%PDF carte grise 2')) + 'filename': 'cartegrise2.pdf', + 'content_type': 'application/pdf', + 'content': force_text(base64.b64encode(b'%PDF carte grise 2')), } 
params['taxe_habitat'] = { - 'filename': 'cartegrise2.pdf', 'content_type': 'application/pdf', - 'content': None + 'filename': 'cartegrise2.pdf', + 'content_type': 'application/pdf', + 'content': None, } params['taxe_habitat,2'] = {'content_type': 'application/pdf', 'content': 'whatever'} params['impot_revenu'] = 'this is my tax notice' @@ -642,7 +704,7 @@ def test_send_files(dpark, app, settings, freezer): u" value is not a dict", u" is either absent or has an invalid value", u" is either absent or has an invalid value", - u"Invalid document type: " + u"Invalid document type: ", ] assert resp.json['err'] == 1 assert resp.json['err_desc'] == '%s' % errors @@ -672,25 +734,34 @@ def test_send_files(dpark, app, settings, freezer): assert resp.json['data'] is True assert len(dpark.mock_requests) == 1 root = ET.fromstring(dpark.mock_requests[0].body) - pj_node = root.find('{http://schemas.xmlsoap.org/soap/envelope/}Body/{urn:Webservice_Residants}PLS_ENVOIPJ') + pj_node = root.find( + '{http://schemas.xmlsoap.org/soap/envelope/}Body/{urn:Webservice_Residants}PLS_ENVOIPJ' + ) assert pj_node.find('NumeroTeledossier').text == 'E-8-N5UTAK6P' assert pj_node.find('NumeroDossier').text == '61718' assert pj_node.find('NbFichier').text == '4' assert len(pj_node.findall('Bloc_Fichiers')) == 4 assert pj_node.findall('Bloc_Fichiers')[0].find('TypeDocument').text == '6' assert pj_node.findall('Bloc_Fichiers')[0].find('NomFichier').text == 'cartegrise.pdf' - assert pj_node.findall('Bloc_Fichiers')[0].find('Fichier').text == force_text(base64.b64encode(b'%PDF carte grise 1')) + assert pj_node.findall('Bloc_Fichiers')[0].find('Fichier').text == force_text( + base64.b64encode(b'%PDF carte grise 1') + ) assert pj_node.findall('Bloc_Fichiers')[1].find('TypeDocument').text == '6' assert pj_node.findall('Bloc_Fichiers')[1].find('NomFichier').text == 'cartegrise2.pdf' assert pj_node.findall('Bloc_Fichiers')[1].find('Fichier').text == force_text( - base64.b64encode(b'%PDF carte grise 2')) + 
base64.b64encode(b'%PDF carte grise 2') + ) assert pj_node.findall('Bloc_Fichiers')[2].find('TypeDocument').text == '2' assert pj_node.findall('Bloc_Fichiers')[2].find('NomFichier').text == 'address_proof.pDf' - assert pj_node.findall('Bloc_Fichiers')[2].find('Fichier').text == force_text(base64.b64encode( - b'%PDF this is my proof of address')) + assert pj_node.findall('Bloc_Fichiers')[2].find('Fichier').text == force_text( + base64.b64encode(b'%PDF this is my proof of address') + ) assert pj_node.findall('Bloc_Fichiers')[3].find('TypeDocument').text == '73' from passerelle.utils.conversion import to_pdf + assert pj_node.findall('Bloc_Fichiers')[3].find('NomFichier').text == 'cartegrisetoto.jpg.pdf' - assert pj_node.findall('Bloc_Fichiers')[3].find('Fichier').text == force_text(base64.b64encode(to_pdf(JPEG_CONTENT))) + assert pj_node.findall('Bloc_Fichiers')[3].find('Fichier').text == force_text( + base64.b64encode(to_pdf(JPEG_CONTENT)) + ) assert base64.b64decode(pj_node.findall('Bloc_Fichiers')[3].find('Fichier').text).startswith(b'%PDF') diff --git a/tests/test_ensure_jsonbfields.py b/tests/test_ensure_jsonbfields.py index 9c4317ec..02b768e7 100644 --- a/tests/test_ensure_jsonbfields.py +++ b/tests/test_ensure_jsonbfields.py @@ -12,18 +12,25 @@ from passerelle.contrib.teamnet_axel.models import TeamnetAxel pytestmark = pytest.mark.django_db + @pytest.fixture def setup(): - - def maker(columns_keynames='fam,id,lname,fname,sex', filename='data.csv', sheet_name='Feuille2', - data=b''): - csv = CsvDataSource.objects.create(csv_file=File(BytesIO(data), filename), - sheet_name=sheet_name, columns_keynames=columns_keynames, - slug='test', title='a title', - description='a description') - teamnet = TeamnetAxel.objects.create(slug='test', billing_regies={}, - wsdl_url='http://example.net/AXEL_WS/AxelWS.php?wsdl') + def maker( + columns_keynames='fam,id,lname,fname,sex', filename='data.csv', sheet_name='Feuille2', data=b'' + ): + csv = CsvDataSource.objects.create( + 
csv_file=File(BytesIO(data), filename), + sheet_name=sheet_name, + columns_keynames=columns_keynames, + slug='test', + title='a title', + description='a description', + ) + teamnet = TeamnetAxel.objects.create( + slug='test', billing_regies={}, wsdl_url='http://example.net/AXEL_WS/AxelWS.php?wsdl' + ) return csv, teamnet + return maker @@ -37,8 +44,12 @@ def test_ensure_jsonb_fields(setup): assert line[2] == 'jsonb' # alter columns - cursor.execute('ALTER TABLE csvdatasource_csvdatasource ALTER COLUMN _dialect_options TYPE text USING _dialect_options::text') - cursor.execute('ALTER TABLE teamnet_axel_teamnetaxel ALTER COLUMN billing_regies TYPE text USING billing_regies::text') + cursor.execute( + 'ALTER TABLE csvdatasource_csvdatasource ALTER COLUMN _dialect_options TYPE text USING _dialect_options::text' + ) + cursor.execute( + 'ALTER TABLE teamnet_axel_teamnetaxel ALTER COLUMN billing_regies TYPE text USING billing_regies::text' + ) call_command('ensure_jsonb') diff --git a/tests/test_fake_family.py b/tests/test_fake_family.py index e4ce431e..61a6ac28 100644 --- a/tests/test_fake_family.py +++ b/tests/test_fake_family.py @@ -9,6 +9,7 @@ from passerelle.contrib.fake_family.models import FakeFamily pytestmark = pytest.mark.django_db + def test_init_fake_family(): fake = FakeFamily.objects.create() jsondb = fake.jsondatabase @@ -25,10 +26,12 @@ def test_init_fake_family(): def fakefam(db): return FakeFamily.objects.create(title='fakefam', slug='fakefam') + def test_fake_family_dump(app, fakefam): resp = app.get(reverse('fake-family-dump', kwargs={'slug': fakefam.slug})) assert resp.json == fakefam.jsondatabase + def test_fake_family_urls(app, fakefam): name_id = '__test_name_id__' family = fakefam.jsondatabase['families']['1'] @@ -36,31 +39,33 @@ def test_fake_family_urls(app, fakefam): adult = fakefam.jsondatabase['adults'][adult_id] login = adult['login'] password = adult['password'] - resp = app.get(reverse('fake-family-link', kwargs={'slug': fakefam.slug}) + \ 
- '?NameID=%s&login=%s&password=%s' % (name_id, login, password)) + resp = app.get( + reverse('fake-family-link', kwargs={'slug': fakefam.slug}) + + '?NameID=%s&login=%s&password=%s' % (name_id, login, password) + ) assert resp.json['err'] == 0 assert resp.json['data'] == adult - resp = app.get(reverse('fake-family-info', kwargs={'slug': fakefam.slug}) + \ - '?NameID=%s' % name_id) + resp = app.get(reverse('fake-family-info', kwargs={'slug': fakefam.slug}) + '?NameID=%s' % name_id) assert resp.json['err'] == 0 assert resp.json['data']['adults'][0]['address'] == adult['address'] - resp = app.get(reverse('fake-family-key', kwargs={'slug': fakefam.slug, 'key': 'adults'}) + \ - '?NameID=%s' % name_id) + resp = app.get( + reverse('fake-family-key', kwargs={'slug': fakefam.slug, 'key': 'adults'}) + '?NameID=%s' % name_id + ) assert resp.json['err'] == 0 assert resp.json['data'][0]['address'] == adult['address'] - resp = app.get(reverse('fake-family-key', kwargs={'slug': fakefam.slug, 'key': 'children'}) + \ - '?NameID=%s' % name_id) + resp = app.get( + reverse('fake-family-key', kwargs={'slug': fakefam.slug, 'key': 'children'}) + '?NameID=%s' % name_id + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 2 - resp = app.get(reverse('fake-family-unlink', kwargs={'slug': fakefam.slug}) + \ - '?NameID=%s' % name_id) + resp = app.get(reverse('fake-family-unlink', kwargs={'slug': fakefam.slug}) + '?NameID=%s' % name_id) assert resp.json['err'] == 0 assert resp.json['data'] == 'ok' - resp = app.get(reverse('fake-family-unlink', kwargs={'slug': fakefam.slug}) + \ - '?NameID=%s' % name_id) + resp = app.get(reverse('fake-family-unlink', kwargs={'slug': fakefam.slug}) + '?NameID=%s' % name_id) assert resp.json['err'] == 0 assert resp.json['data'] == 'ok (but there was no links)' + def test_fake_family_bad_login(app, fakefam): name_id = '__test_name_id__' family = fakefam.jsondatabase['families']['1'] @@ -68,14 +73,18 @@ def test_fake_family_bad_login(app, fakefam): 
adult = fakefam.jsondatabase['adults'][adult_id] login = adult['login'] password = 'bad' + adult['password'] - resp = app.get(reverse('fake-family-link', kwargs={'slug': fakefam.slug}) + \ - '?NameID=%s&login=%s&password=%s' % (name_id, login, password), - status=403) + resp = app.get( + reverse('fake-family-link', kwargs={'slug': fakefam.slug}) + + '?NameID=%s&login=%s&password=%s' % (name_id, login, password), + status=403, + ) assert resp.json['err'] == 100 assert resp.json['err_desc'] == 'bad password' login = 'bad@login' - resp = app.get(reverse('fake-family-link', kwargs={'slug': fakefam.slug}) + \ - '?NameID=%s&login=%s&password=%s' % (name_id, login, password), - status=403) + resp = app.get( + reverse('fake-family-link', kwargs={'slug': fakefam.slug}) + + '?NameID=%s&login=%s&password=%s' % (name_id, login, password), + status=403, + ) assert resp.json['err'] == 100 assert resp.json['err_desc'] == 'unknown login' diff --git a/tests/test_family.py b/tests/test_family.py index a2e30f4f..4c26d050 100644 --- a/tests/test_family.py +++ b/tests/test_family.py @@ -31,46 +31,69 @@ pytestmark = pytest.mark.django_db nameid = 'foobarnameid' API_KEY = 'family' + @pytest.fixture def resource(db): filepath = os.path.join(os.path.dirname(__file__), 'data', 'family_data.zip') with open(filepath, 'rb') as fd: - resource = GenericFamily.objects.create(title='test', slug='test', archive=File(fd, 'family_data.zip')) + resource = GenericFamily.objects.create( + title='test', slug='test', archive=File(fd, 'family_data.zip') + ) api = ApiUser.objects.create(username='family', keytype='API', key=API_KEY) obj_type = ContentType.objects.get_for_model(GenericFamily) - AccessRight.objects.create(codename='can_access', apiuser=api, - resource_type=obj_type, resource_pk=resource.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=resource.pk + ) return resource + def test_link_to_family(app, resource): - r = 
app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}), - params={'NameID': nameid, 'login': '9407', - 'password': 'gkh0UrrH', 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}, + ), + params={'NameID': nameid, 'login': '9407', 'password': 'gkh0UrrH', 'apikey': API_KEY}, + ) assert r.json['data'] assert FamilyLink.objects.filter(resource=resource, name_id=nameid).exists() + def test_unlink_from_family(app, resource): - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}), - params={'NameID': nameid, 'login': '1364', - 'password': 'Li6LN1ID', 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}, + ), + params={'NameID': nameid, 'login': '1364', 'password': 'Li6LN1ID', 'apikey': API_KEY}, + ) assert r.json['data'] - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'unlink/'}), - params={'NameID': nameid, 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 'rest': 'unlink/'}, + ), + params={'NameID': nameid, 'apikey': API_KEY}, + ) assert r.json['data'] assert not FamilyLink.objects.filter(resource=resource, name_id=nameid).exists() + def test_family_infos(app, resource): - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}), - params={'NameID': nameid, 'login': '9407', - 'password': 'gkh0UrrH', 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 
'rest': 'link/'}, + ), + params={'NameID': nameid, 'login': '9407', 'password': 'gkh0UrrH', 'apikey': API_KEY}, + ) assert r.json['data'] - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family/'}), - params={'NameID': nameid, 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family/'} + ), + params={'NameID': nameid, 'apikey': API_KEY}, + ) data = r.json['data'] assert data['id'] assert data['adults'] @@ -83,16 +106,29 @@ def test_family_infos(app, resource): assert data['address'] is not None assert data['quotient'] == '1370.50' + def test_family_members(app, resource): - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}), - params={'NameID': nameid, 'login': '23762', - 'password': 's6HliUMX', 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}, + ), + params={'NameID': nameid, 'login': '23762', 'password': 's6HliUMX', 'apikey': API_KEY}, + ) assert r.json['data'] for typ in ('adults', 'children'): - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': '%s/' % typ}), - params={'NameID': nameid, 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'family', + 'slug': resource.slug, + 'endpoint': 'family', + 'rest': '%s/' % typ, + }, + ), + params={'NameID': nameid, 'apikey': API_KEY}, + ) data = r.json['data'] assert len(data) @@ -107,124 +143,217 @@ def test_family_members(app, resource): assert person['city'] is not None assert person['address'] is not None - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': '%s/' % typ}), - 
params={'NameID': 'unknown', 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'family', + 'slug': resource.slug, + 'endpoint': 'family', + 'rest': '%s/' % typ, + }, + ), + params={'NameID': 'unknown', 'apikey': API_KEY}, + ) data = r.json['data'] assert len(data) == 0 + def test_get_family_invoices(app, resource): - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}), - params={'NameID': nameid, 'login': '19184', - 'password': '8xUhrK6e', 'apikey': API_KEY}) - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'regie', 'rest': 'invoices/'}), - params={'NameID': nameid, 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}, + ), + params={'NameID': nameid, 'login': '19184', 'password': '8xUhrK6e', 'apikey': API_KEY}, + ) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'regie', 'rest': 'invoices/'}, + ), + params={'NameID': nameid, 'apikey': API_KEY}, + ) assert r.json['data'] for i in r.json['data']: assert not i['paid'] - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'regie', - 'rest': 'invoices/history/'}), - params={'NameID': nameid, 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'family', + 'slug': resource.slug, + 'endpoint': 'regie', + 'rest': 'invoices/history/', + }, + ), + params={'NameID': nameid, 'apikey': API_KEY}, + ) assert r.json['data'] for i in r.json['data']: assert i['paid'] assert i['has_pdf'] + def test_get_family_invoice(app, resource): - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}), - 
params={'NameID': nameid, 'login': '19184', - 'password': '8xUhrK6e', 'apikey': API_KEY}) - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'regie', - 'rest': 'invoices/'}), - params={'NameID': nameid, 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}, + ), + params={'NameID': nameid, 'login': '19184', 'password': '8xUhrK6e', 'apikey': API_KEY}, + ) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'regie', 'rest': 'invoices/'}, + ), + params={'NameID': nameid, 'apikey': API_KEY}, + ) assert r.json['data'] invoice = r.json['data'][0] - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'regie', - 'rest': 'invoice/%s/' % invoice['label']}), - params={'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'family', + 'slug': resource.slug, + 'endpoint': 'regie', + 'rest': 'invoice/%s/' % invoice['label'], + }, + ), + params={'apikey': API_KEY}, + ) assert r.json['data'] for field in ('label', 'amount', 'paid', 'created', 'pay_limit_date'): assert r.json['data'][field] == invoice[field] assert r.json['data']['reference_id'] == r.json['data']['label'] + def test_get_invoice_pdf(app, resource): - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}), - params={'NameID': nameid, 'login': '11959', - 'password': '1WI6JOux', 'apikey': API_KEY}) - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'regie', - 'rest': 'invoices/history/'}), - params={'NameID': nameid, 'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 
'rest': 'link/'}, + ), + params={'NameID': nameid, 'login': '11959', 'password': '1WI6JOux', 'apikey': API_KEY}, + ) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'family', + 'slug': resource.slug, + 'endpoint': 'regie', + 'rest': 'invoices/history/', + }, + ), + params={'NameID': nameid, 'apikey': API_KEY}, + ) invoice = r.json['data'][0] assert invoice - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'regie', - 'rest': 'invoice/%s/pdf/' % invoice['label']}), - params={'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'family', + 'slug': resource.slug, + 'endpoint': 'regie', + 'rest': 'invoice/%s/pdf/' % invoice['label'], + }, + ), + params={'apikey': API_KEY}, + ) assert 'Content-Type' in r.headers assert 'Content-Disposition' in r.headers assert r.headers['Content-Type'] == 'application/pdf' assert r.headers['Content-Disposition'] == 'attachment; filename=%s.pdf' % invoice['label'] + def test_pay_invoice(app, resource): - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}), - params={'NameID': nameid, 'login': '19184', - 'password': '8xUhrK6e', 'apikey': API_KEY}) - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'regie', - 'rest': 'invoices/'}), - params={'NameID': nameid}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'family', 'rest': 'link/'}, + ), + params={'NameID': nameid, 'login': '19184', 'password': '8xUhrK6e', 'apikey': API_KEY}, + ) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={'connector': 'family', 'slug': resource.slug, 'endpoint': 'regie', 'rest': 'invoices/'}, + ), + params={'NameID': nameid}, + ) invoice = r.json['data'][0] - payment_url = reverse('generic-endpoint', kwargs={'connector': 
'family', - 'slug': resource.slug, 'endpoint': 'regie', - 'rest': 'invoice/%s/pay/' % invoice['label']}) - r = app.post_json(payment_url + '?' + urlencode({'apikey': API_KEY}), - params={'transaction_id': str(uuid4()), - 'transaction_date': timezone.now().strftime(DATETIME_FORMAT)} + payment_url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'family', + 'slug': resource.slug, + 'endpoint': 'regie', + 'rest': 'invoice/%s/pay/' % invoice['label'], + }, + ) + r = app.post_json( + payment_url + '?' + urlencode({'apikey': API_KEY}), + params={'transaction_id': str(uuid4()), 'transaction_date': timezone.now().strftime(DATETIME_FORMAT)}, ) assert r.json['data'] - r = app.get(reverse('generic-endpoint', kwargs={'connector': 'family', - 'slug': resource.slug, 'endpoint': 'regie', - 'rest': 'invoice/%s/' % invoice['label']}), - params={'apikey': API_KEY}) + r = app.get( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'family', + 'slug': resource.slug, + 'endpoint': 'regie', + 'rest': 'invoice/%s/' % invoice['label'], + }, + ), + params={'apikey': API_KEY}, + ) assert r.json['data']['paid'] assert float(r.json['data']['amount']) == 0 + def test_fondettes_concerto_loader(): Invoice.objects.all().delete() filepath = os.path.join(os.path.dirname(__file__), 'data', 'family_data_fondettes.zip') with open(filepath, 'rb') as fd: - resource = GenericFamily.objects.create(title='test fondettes', - slug='test-fondettes', archive=File(fd, 'family_data_fondettes.zip'), - file_format='concerto_fondettes') + resource = GenericFamily.objects.create( + title='test fondettes', + slug='test-fondettes', + archive=File(fd, 'family_data_fondettes.zip'), + file_format='concerto_fondettes', + ) assert Invoice.objects.filter(resource=resource).count() == 630 assert len([x for x in Invoice.objects.filter(resource=resource) if x.has_pdf]) == 4 assert Invoice.objects.filter(paid=True).count() == 312 assert Invoice.objects.filter(paid=False).count() == 318 assert 
Invoice.objects.filter(online_payment=False).count() == 2 + def test_archive_validation(): filepath = os.path.join(os.path.dirname(__file__), 'data', 'iparapheur.wsdl') with open(filepath) as fd: - resource = GenericFamily.objects.create(title='test', slug='test', archive=File(fd, 'iparapheur.wsdl')) + resource = GenericFamily.objects.create( + title='test', slug='test', archive=File(fd, 'iparapheur.wsdl') + ) with pytest.raises(ValidationError): resource.clean() + def test_fondettes_concerto_validation(): filepath = os.path.join(os.path.dirname(__file__), 'data', 'family_data_fondettes.zip') with open(filepath, 'rb') as fd: - resource = GenericFamily.objects.create(title='test fondettes', - slug='test fondettes', archive=File(fd, 'family_data_fondettes.zip'), file_format='concerto_fondettes') + resource = GenericFamily.objects.create( + title='test fondettes', + slug='test fondettes', + archive=File(fd, 'family_data_fondettes.zip'), + file_format='concerto_fondettes', + ) resource.clean() filepath = os.path.join(os.path.dirname(__file__), 'data', 'family_data.zip') @@ -232,14 +361,19 @@ def test_fondettes_concerto_validation(): with pytest.raises(ValidationError): resource.clean() + def test_orleans_concerto_loader(): # all related objects will also be deleted Family.objects.all().delete() - filepath = os.path.join(os.path.dirname(__file__), 'data', 'orleans', - 'family_data_orleans.zip') - resource = GenericFamily(title='test orleans', - slug='test-orleans', archive=File(open(filepath, 'rb'), 'family_data_orleans.zip'), file_format='concerto_orleans') + filepath = os.path.join(os.path.dirname(__file__), 'data', 'orleans', 'family_data_orleans.zip') + resource = GenericFamily( + title='test orleans', + slug='test-orleans', + archive=File(open(filepath, 'rb'), 'family_data_orleans.zip'), + file_format='concerto_orleans', + ) from passerelle.apps.family.loaders.concerto_orleans import Loader + loader = Loader(resource) loader.archive = zipfile.ZipFile(filepath) @@ 
-291,6 +425,7 @@ def test_orleans_concerto_loader(): assert Child.objects.all().count() == 35 assert Invoice.objects.filter(resource=resource).count() == 0 + def test_orleans_data_import_command(): with pytest.raises(CommandError) as error: call_command('import_orleans_data') @@ -300,16 +435,18 @@ def test_orleans_data_import_command(): data_dir = os.path.join(cur_dir, 'data', 'orleans') with open(os.path.join(data_dir, 'family_data_orleans.zip'), 'rb') as fd: - resource = GenericFamily.objects.create(title='test orleans', - archive=File(fd, 'family_data_orleans.zip'), - slug='test-orleans', file_format='concerto_orleans') + resource = GenericFamily.objects.create( + title='test orleans', + archive=File(fd, 'family_data_orleans.zip'), + slug='test-orleans', + file_format='concerto_orleans', + ) # cleanup data before launching import Family.objects.filter(resource=resource).delete() Invoice.objects.filter(resource=resource).delete() - call_command('import_orleans_data', - archive_file=os.path.join(data_dir, 'family_data_orleans.zip')) + call_command('import_orleans_data', archive_file=os.path.join(data_dir, 'family_data_orleans.zip')) assert Family.objects.filter(resource=resource).count() == 0 assert Invoice.objects.filter(resource=resource).count() == 0 @@ -319,9 +456,11 @@ def test_orleans_data_import_command(): shutil.rmtree(resource_invoices_dir) os.symlink(os.path.join(data_dir, 'factures'), resource_invoices_dir) - call_command('import_orleans_data', - archive_file=os.path.join(data_dir, 'family_data_orleans.zip'), - connector='test-orleans') + call_command( + 'import_orleans_data', + archive_file=os.path.join(data_dir, 'family_data_orleans.zip'), + connector='test-orleans', + ) os.unlink(resource_invoices_dir) assert Family.objects.filter(resource=resource).count() == 18 @@ -329,45 +468,52 @@ def test_orleans_data_import_command(): assert Child.objects.all().count() == 35 assert Invoice.objects.filter(resource=resource).count() == 7 + def 
test_family_pending_invoices_by_nameid_with_no_links(): test_orleans_data_import_command() resource = GenericFamily.objects.get() links = resource.get_pending_invoices_by_nameid(None) assert links['data'] == {} + def test_family_pending_invoices_by_nameid(): test_orleans_data_import_command() resource = GenericFamily.objects.get() family = Family.objects.get(external_id='22380') - link = FamilyLink.objects.create(resource=resource, family=family, - name_id='testnameid1') + link = FamilyLink.objects.create(resource=resource, family=family, name_id='testnameid1') family = Family.objects.get(external_id='1228') - link = FamilyLink.objects.create(resource=resource, family=family, - name_id='testnameid2') + link = FamilyLink.objects.create(resource=resource, family=family, name_id='testnameid2') links = resource.get_pending_invoices_by_nameid(None) assert len(links['data']) == 2 for uuid, invoices in links['data'].items(): assert uuid in ('testnameid1', 'testnameid2') assert len(invoices) >= 1 + def test_incorrect_orleans_data(caplog): - filepath = os.path.join(os.path.dirname(__file__), 'data', - 'family_incorrect_data_orleans.zip') - resource = GenericFamily.objects.create(title='test orleans', - slug='test-orleans', archive=File(open(filepath, 'rb'), 'family_incorrect_data_orleans.zip'), - file_format='concerto_orleans') + filepath = os.path.join(os.path.dirname(__file__), 'data', 'family_incorrect_data_orleans.zip') + resource = GenericFamily.objects.create( + title='test orleans', + slug='test-orleans', + archive=File(open(filepath, 'rb'), 'family_incorrect_data_orleans.zip'), + file_format='concerto_orleans', + ) for record in caplog.records: assert 'Error occured while importing data:' in record.message assert record.name == 'passerelle.resource.family.test-orleans' assert record.levelno == logging.ERROR + def test_egee_thonon_loader(): Invoice.objects.all().delete() filepath = os.path.join(os.path.dirname(__file__), 'data', 'family_data_egee_thonon.zip') with 
open(filepath, 'rb') as fd: - resource = GenericFamily.objects.create(title='test-egee-thonon', - slug='test-egee-thonon', archive=File(fd, 'family_data_egee_thonon.zip'), - file_format='egee_thonon') + resource = GenericFamily.objects.create( + title='test-egee-thonon', + slug='test-egee-thonon', + archive=File(fd, 'family_data_egee_thonon.zip'), + file_format='egee_thonon', + ) assert Invoice.objects.filter(resource=resource).count() == 4 for invoice in Invoice.objects.all(): assert len(invoice.external_id) == 13 diff --git a/tests/test_feeds.py b/tests/test_feeds.py index cc2c85fd..9a42b199 100644 --- a/tests/test_feeds.py +++ b/tests/test_feeds.py @@ -43,7 +43,8 @@ FEED_EXAMPLE = u""" @pytest.fixture def connector(db): - return utils.setup_access_rights(Feed.objects.create( slug='test', url='http://example.net/')) + return utils.setup_access_rights(Feed.objects.create(slug='test', url='http://example.net/')) + @mock.patch('passerelle.utils.Request.get') def test_feed(mocked_get, app, connector): diff --git a/tests/test_gdema.py b/tests/test_gdema.py index 0d124fca..53044ff1 100644 --- a/tests/test_gdema.py +++ b/tests/test_gdema.py @@ -12,7 +12,9 @@ from passerelle.base.models import ApiUser, AccessRight SERVICES = '[{"AdminService":"ADMINISTRATEUR DIV - Transports et D\xc3\xa9placements","CommuneService":null,"Id":"16151","Label":"DMT - Mobilité et transports","Typology":[]},{"AdminService":"ADMINISTRATEUR DEP ADMINISTRATION","CommuneService":null,"Id":"10173","Label":"DESPU - Administration Direction environnement et services publics urbains","Typology":[{"Text":"Maintenance Cie","Value":"13067"},{"Text":"Sensibilisation en milieu scolaire","Value":"14948"},{"Text":"Demandes diverses","Value":"11532"},{"Text":"Demande de stage compostage","Value":"12992"},{"Text":"Pr\xc3\xa9sence de d\xc3\xa9chets verts","Value":"20432"}]},{"AdminService":"ADMINISTRATEUR DED3","CommuneService":null,"Id":"10426","Label":"DEE - Périscolaire et 
éducatif","Typology":[{"Text":"Activités périscolaires","Value":"10804"},{"Text":"Garderie","Value":"10805"},{"Text":"Restauration scolaire","Value":"10806"},{"Text":"Restauration scolaire \\/ Nutrition","Value":"11180"},{"Text":"Restauration scolaire \\/ Abonnements \\/cantine et r\xc3\xa9gie","Value":"10807"},{"Text":"Projets éducatifs en temps scolaire","Value":"10808"},{"Text":"Autres","Value":"10809"}]}]' -CIVILITY = '[{"Text":"Monsieur","Value":"1"},{"Text":"Madame","Value":"2"},{"Text":"Mademoiselle","Value":"3"}]' +CIVILITY = ( + '[{"Text":"Monsieur","Value":"1"},{"Text":"Madame","Value":"2"},{"Text":"Mademoiselle","Value":"3"}]' +) REQUEST = r'{"AnswerToProvide":true,"AssigmentServiceName":"DEPE - Projets et maintenance du patrimoine","AssigmentStructure":"Cellule Travaux","AssociationId":0,"ClosingDate":"/Date(1306314926000+0200)/","Confidential":false,"DataEntryService":"Education","Description":"contrôle de toutes les portes extérieures des classes - gonds faibles pour le poids de la porte(N° 11353 v1)","DesiredResponseDate":null,"ExpectedDate":null,"ExpectedInterventionDate":null,"Files":[],"Handler":{"CUSStreetCode":null,"Cedex":null,"CellPhone":null,"Civility":null,"CivilityId":0,"Fax":null,"Firstname":null,"Information":null,"Lastname":"ANONYME","Mail":null,"Organization":0,"Phone":null,"SectionCode":null,"SectionLetter":null,"Street":null,"StreetLetter":null,"StreetNumber":null,"StructureLabel":null,"StructureShortLabel":null,"Title":null,"TitleId":0,"Town":null,"TownLabel":null,"ZipCode":null},"Id":1,"InputChannel":"Courrier","Localization":{"AdditionnalInformation":null,"CUSStreetCode":"2075","ElectedDistrict":"006","JobSector":null,"Other":"(sous-localisation : Bâtiment)","SectionCode":"1","SectionLetter":"_","Site":"Conseil","SiteId":1790,"Street":"RUE DE LA 
PLACE","StreetLetter":null,"StreetNumber":"2","TerritorialSector":"105","Town":"482","TownLabel":"STRASBOURG","ZipCode":"67000"},"Origin":2,"OriginLabel":"Usager","Priority":2,"PriorityLabel":"Normal","ReceptDate":"/Date(1165964400000+0100)/","Refused":false,"ReleaseDate":null,"Response":false,"ResponseFinal":true,"ResponseIntermediate":false,"Responses":[{"Date":"/Date(1306274400000+0200)/","OutputChannel":"Service X","Resume":"Intervention réalisée","SibeliusReference":null,"SignatureName":"UC","Type":2,"TypeLabel":"Finale"}],"SibeliusReference":null,"SiteCode":null,"SiteName":"Conseil","Sleeping":null,"State":64,"StateLabel":"Cloturée","Structure":"Cellule Travaux","Syscreationdate":"/Date(1165186800000+0100)/","Sysmodificationdate":"/Date(1306314926000+0200)/","Typology":{"Id":11168,"Label":"Maintenance"}}' @@ -35,7 +37,7 @@ CREATE_INPUT = { # from Publik system 'filename': 'test1.txt', 'content_type': 'text/plain', 'content': 'dW4=', - } + }, } CONVERTED_INPUT = { # to GDEMA webservice @@ -43,35 +45,26 @@ CONVERTED_INPUT = { # to GDEMA webservice "Id": "21012", }, "Description": "this is a test", - "Localization": { - "Town": "482", - "TownLabel": "STRASBOURG" - }, + "Localization": {"Town": "482", "TownLabel": "STRASBOURG"}, "Origin": "2", "Priority": "2", "ReceptDate": "/Date(1165964400000+0100)/", "Files": [ - { - "Base64Stream": "dW4=", - "Name": "test1.txt" - }, - { - "Base64Stream": "ZGV1eA==", - "Name": "test2.txt" - } - ] + {"Base64Stream": "dW4=", "Name": "test1.txt"}, + {"Base64Stream": "ZGV1eA==", "Name": "test2.txt"}, + ], } @pytest.fixture def gdema(db): - gdema = Gdema.objects.create(slug='test', - service_url='https://gdema.example.net/api/') + gdema = Gdema.objects.create(slug='test', service_url='https://gdema.example.net/api/') # open access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(gdema) - AccessRight.objects.create(codename='can_access', apiuser=api, - 
resource_type=obj_type, resource_pk=gdema.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=gdema.pk + ) return gdema @@ -79,8 +72,7 @@ def test_gdema_services_and_typologies(app, gdema): endpoint = utils.generic_endpoint_url('gdema', 'referentiel', slug=gdema.slug) assert endpoint == '/gdema/test/referentiel' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=SERVICES, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=SERVICES, status_code=200) resp = app.get(endpoint + '/service/', status=200) assert requests_get.call_count == 1 assert requests_get.call_args[0][0] == 'https://gdema.example.net/api/referentiel/service' @@ -98,7 +90,10 @@ def test_gdema_services_and_typologies(app, gdema): assert resp.json['data'][0]['id'] == '13067' assert resp.json['data'][0]['text'] == u'Maintenance Cie' assert resp.json['data'][0]['service_id'] == '10173' - assert resp.json['data'][0]['service_text'] == u'DESPU - Administration Direction environnement et services publics urbains' + assert ( + resp.json['data'][0]['service_text'] + == u'DESPU - Administration Direction environnement et services publics urbains' + ) resp = app.get(endpoint + '/typology/?service_id=10426', status=200) assert requests_get.call_count == 3 @@ -115,8 +110,7 @@ def test_gdema_referentiel(app, gdema): endpoint = utils.generic_endpoint_url('gdema', 'referentiel', slug=gdema.slug) assert endpoint == '/gdema/test/referentiel' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=CIVILITY, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=CIVILITY, status_code=200) resp = app.get(endpoint + '/civility/', status=200) assert requests_get.call_count == 1 assert requests_get.call_args[0][0] == 'https://gdema.example.net/api/referentiel/civility' @@ -142,8 +136,7 
@@ def test_gdema_get_request(app, gdema): endpoint = utils.generic_endpoint_url('gdema', 'get-request', slug=gdema.slug) assert endpoint == '/gdema/test/get-request' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=REQUEST, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=REQUEST, status_code=200) resp = app.get(endpoint + '/1/', status=200) assert requests_get.call_count == 1 assert requests_get.call_args[0][0] == 'https://gdema.example.net/api/request/1' @@ -160,8 +153,7 @@ def test_gdema_get_request(app, gdema): endpoint = utils.generic_endpoint_url('gdema', 'get-request-state', slug=gdema.slug) assert endpoint == '/gdema/test/get-request-state' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=REQUEST_STATE, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=REQUEST_STATE, status_code=200) resp = app.get(endpoint + '/1/', status=200) assert requests_get.call_count == 1 assert requests_get.call_args[0][0] == 'https://gdema.example.net/api/request/1/state' @@ -174,8 +166,7 @@ def test_gdema_create_request(app, gdema): endpoint = utils.generic_endpoint_url('gdema', 'create-request', slug=gdema.slug) assert endpoint == '/gdema/test/create-request' with mock.patch('passerelle.utils.Request.post') as requests_post: - requests_post.return_value = utils.FakedResponse(content=REQUEST, - status_code=200) + requests_post.return_value = utils.FakedResponse(content=REQUEST, status_code=200) resp = app.post_json(endpoint + '?raise=1', params=CREATE_INPUT, status=200) assert requests_post.call_count == 1 assert requests_post.call_args[0][0] == 'https://gdema.example.net/api/request/create' diff --git a/tests/test_generic_endpoint.py b/tests/test_generic_endpoint.py index a796d1f6..3cea87f6 100644 --- a/tests/test_generic_endpoint.py +++ b/tests/test_generic_endpoint.py @@ 
-54,11 +54,7 @@ def arcgis(db): return instance -DEMAND_STATUS = { - 'closed': True, - 'status': 'accepted', - 'comment': 'dossier trait\xe9.' -} +DEMAND_STATUS = {'closed': True, 'status': 'accepted', 'comment': 'dossier trait\xe9.'} @mock.patch('passerelle.apps.mdel.models.Demand.get_status', lambda x: DEMAND_STATUS) @@ -187,40 +183,39 @@ class FakeConnectorBase(object): def foo5(self, request, param1='a', param2='b', param3='c'): pass - @endpoint(pattern=r'^(?P\w+)/?$', - example_pattern='{param1}/', - parameters={ - 'param1': { - 'description': 'param 1', - 'example_value': 'bar' - }}) + @endpoint( + pattern=r'^(?P\w+)/?$', + example_pattern='{param1}/', + parameters={'param1': {'description': 'param 1', 'example_value': 'bar'}}, + ) def foo6(self, request, param1, param2='a'): pass - @endpoint(description_get='foo7 get', description_post='foo7 post', - methods=['get', 'post']) + @endpoint(description_get='foo7 get', description_post='foo7 post', methods=['get', 'post']) def foo7(self, request, param1='a', param2='b', param3='c'): pass - @endpoint(long_description_get='foo7 get', long_description_post='foo7 post', - methods=['get', 'post']) + @endpoint(long_description_get='foo7 get', long_description_post='foo7 post', methods=['get', 'post']) def foo7b(self, request, param1='a', param2='b', param3='c'): pass - @endpoint(parameters={ - 'test': {'description': 'test', 'example_value': 'test'}, - 'reg': {'description': 'test', 'example_value': 'test'}, - }) + @endpoint( + parameters={ + 'test': {'description': 'test', 'example_value': 'test'}, + 'reg': {'description': 'test', 'example_value': 'test'}, + } + ) def foo8(self, request, test, reg): pass - @endpoint(post={ - 'long_description': 'foo9 post', - }) + @endpoint( + post={ + 'long_description': 'foo9 post', + } + ) def foo9(self, request): pass - @endpoint(cache_duration=10) def cached_endpoint(self, request): pass @@ -240,15 +235,18 @@ def test_endpoint_decorator(): assert 
connector.foo4.endpoint_info.get_params() == [ {'name': 'param1'}, {'name': 'param2', 'optional': True, 'default_value': 'a'}, - {'name': 'param3', 'optional': True, 'default_value': 'b'}] + {'name': 'param3', 'optional': True, 'default_value': 'b'}, + ] assert connector.foo5.endpoint_info.get_params() == [ {'name': 'param1', 'optional': True, 'default_value': 'a'}, {'name': 'param2', 'optional': True, 'default_value': 'b'}, - {'name': 'param3', 'optional': True, 'default_value': 'c'}] + {'name': 'param3', 'optional': True, 'default_value': 'c'}, + ] assert connector.foo6.endpoint_info.get_params() == [ {'name': 'param1', 'description': 'param 1'}, - {'name': 'param2', 'optional': True, 'default_value': 'a'}] + {'name': 'param2', 'optional': True, 'default_value': 'a'}, + ] assert connector.foo1.endpoint_info.example_url() == '/fake/connector/foo1' assert connector.foo1.endpoint_info.example_url_as_html() == '/fake/connector/foo1' @@ -257,14 +255,19 @@ def test_endpoint_decorator(): assert connector.foo5.endpoint_info.example_url() == '/fake/connector/foo5/test/' assert connector.foo5.endpoint_info.example_url_as_html() == '/fake/connector/foo5/test/' assert connector.foo6.endpoint_info.example_url() == '/fake/connector/foo6/bar/' - assert connector.foo6.endpoint_info.example_url_as_html() == '/fake/connector/foo6/param1/' + assert ( + connector.foo6.endpoint_info.example_url_as_html() + == '/fake/connector/foo6/param1/' + ) assert not '®' in connector.foo8.endpoint_info.example_url_as_html() connector.foo6.endpoint_info.pattern = None connector.foo6.endpoint_info.example_pattern = None assert connector.foo6.endpoint_info.example_url() == '/fake/connector/foo6?param1=bar' - assert (connector.foo6.endpoint_info.example_url_as_html() - == '/fake/connector/foo6?param1=param1') + assert ( + connector.foo6.endpoint_info.example_url_as_html() + == '/fake/connector/foo6?param1=param1' + ) connector.foo7.endpoint_info.http_method = 'get' assert 
connector.foo7.endpoint_info.description == 'foo7 get' @@ -293,12 +296,9 @@ class FakeJSONConnector(object): 'foo': { 'type': 'array', 'items': { - 'properties': { - 'id': {'type': 'integer'}, - 'bar': {'type': 'boolean'} - }, - 'required': ['id', 'bar'] - } + 'properties': {'id': {'type': 'integer'}, 'bar': {'type': 'boolean'}}, + 'required': ['id', 'bar'], + }, } } } @@ -330,25 +330,11 @@ class FakeJSONConnector(object): BAR_SCHEMA['pre_process'] = pre_process - @endpoint( - post={ - 'request_body': { - 'schema': { - 'application/json': FOO_SCHEMA - } - } - }) + @endpoint(post={'request_body': {'schema': {'application/json': FOO_SCHEMA}}}) def foo(self, request, post_data): return {'data': post_data} - @endpoint( - post={ - 'request_body': { - 'schema': { - 'application/json': BAR_SCHEMA - } - } - }) + @endpoint(post={'request_body': {'schema': {'application/json': BAR_SCHEMA}}}) def bar(self, request, post_data): return {'data': post_data} @@ -357,25 +343,26 @@ def test_endpoint_decorator_pre_process(db, app): connector = FakeJSONConnector() patch_init = mock.patch('passerelle.views.GenericConnectorMixin.init_stuff') - patch_object = mock.patch('passerelle.views.GenericEndpointView.get_object', - return_value=connector) + patch_object = mock.patch('passerelle.views.GenericEndpointView.get_object', return_value=connector) - url_foo = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-json', - 'slug': 'connector-json', - 'endpoint': 'foo', - }) - url_bar = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-json', - 'slug': 'connector-json', - 'endpoint': 'bar', - }) + url_foo = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-json', + 'slug': 'connector-json', + 'endpoint': 'foo', + }, + ) + url_bar = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-json', + 'slug': 'connector-json', + 'endpoint': 'bar', + }, + ) - payload = { - 'foo': [ - {'id': 42, 'bar': True} - ] - } + payload = {'foo': [{'id': 42, 
'bar': True}]} with patch_init, patch_object: resp = app.post_json(url_foo, params=payload) assert resp.json['err'] == 0 @@ -385,12 +372,7 @@ def test_endpoint_decorator_pre_process(db, app): assert resp.json['err'] == 0 assert resp.json['data'] == payload - payload = { - 'foo': [ - {'id': 42, 'bar': True}, - {'id': None, 'bar': False} # invalid object - ] - } + payload = {'foo': [{'id': 42, 'bar': True}, {'id': None, 'bar': False}]} # invalid object with patch_init, patch_object: resp = app.post_json(url_foo, params=payload, status=400) assert resp.json['err'] == 1 @@ -398,31 +380,29 @@ def test_endpoint_decorator_pre_process(db, app): with patch_init, patch_object: resp = app.post_json(url_bar, params=payload) assert resp.json['err'] == 0 - assert resp.json['data'] == { - 'foo': [ - {'id': 42, 'bar': True} - ] - } + assert resp.json['data'] == {'foo': [{'id': 42, 'bar': True}]} class FakeConnectorDatasource(object): slug = 'connector-datasource' log_level = 'DEBUG' - payload = {'data': [ - {'id': '1', 'text': 'A'}, - {'id': '2', 'text': 'aa'}, - {'id': '3', 'text': 'aAa'}, - {'id': '4', 'text': 'AaAA'}, - {'id': '5', 'text': 'b'}, - {'id': '6', 'text': 'Bb'}, - {'id': '7', 'text': 'bbb'}, - {'id': '8', 'text': 'c'}, - {'id': '9', 'text': 'cC'}, - {'id': '10', 'text': 'Ccc'}, - {'id': '11'}, - {'foo': 'bar'}, - ]} + payload = { + 'data': [ + {'id': '1', 'text': 'A'}, + {'id': '2', 'text': 'aa'}, + {'id': '3', 'text': 'aAa'}, + {'id': '4', 'text': 'AaAA'}, + {'id': '5', 'text': 'b'}, + {'id': '6', 'text': 'Bb'}, + {'id': '7', 'text': 'bbb'}, + {'id': '8', 'text': 'c'}, + {'id': '9', 'text': 'cC'}, + {'id': '10', 'text': 'Ccc'}, + {'id': '11'}, + {'foo': 'bar'}, + ] + } def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -471,44 +451,64 @@ def test_datasource_endpoint(db, app): connector = FakeConnectorDatasource() patch_init = mock.patch('passerelle.views.GenericConnectorMixin.init_stuff') - patch_object = 
mock.patch('passerelle.views.GenericEndpointView.get_object', - return_value=connector) + patch_object = mock.patch('passerelle.views.GenericEndpointView.get_object', return_value=connector) - url_a = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-datasource', - 'slug': 'connector-datasource', - 'endpoint': 'a', - }) - url_b = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-datasource', - 'slug': 'connector-datasource', - 'endpoint': 'b', - }) - url_cached_b = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-datasource', - 'slug': 'connector-datasource', - 'endpoint': 'cached_b', - }) - url_bb = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-datasource', - 'slug': 'connector-datasource', - 'endpoint': 'bb', - }) - url_c = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-datasource', - 'slug': 'connector-datasource', - 'endpoint': 'c', - }) - url_d = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-datasource', - 'slug': 'connector-datasource', - 'endpoint': 'd', - }) - url_e = reverse('generic-endpoint', kwargs={ - 'connector': 'connector-datasource', - 'slug': 'connector-datasource', - 'endpoint': 'e', - }) + url_a = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-datasource', + 'slug': 'connector-datasource', + 'endpoint': 'a', + }, + ) + url_b = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-datasource', + 'slug': 'connector-datasource', + 'endpoint': 'b', + }, + ) + url_cached_b = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-datasource', + 'slug': 'connector-datasource', + 'endpoint': 'cached_b', + }, + ) + url_bb = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-datasource', + 'slug': 'connector-datasource', + 'endpoint': 'bb', + }, + ) + url_c = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-datasource', + 'slug': 'connector-datasource', + 'endpoint': 'c', + }, + ) + 
url_d = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-datasource', + 'slug': 'connector-datasource', + 'endpoint': 'd', + }, + ) + url_e = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'connector-datasource', + 'slug': 'connector-datasource', + 'endpoint': 'e', + }, + ) with patch_init, patch_object: resp = app.get(url_a) @@ -557,7 +557,6 @@ def test_endpoint_description_in_template(app, db): def test_endpoint_cache(app, db, monkeypatch): - @endpoint(cache_duration=10, methods=['get', 'post'], pattern=r'^(?P\w+)/$') def randominvoice(obj, request, url_param, get_param=None): return {'data': random.randint(0, pow(10, 12))} @@ -568,7 +567,6 @@ def test_endpoint_cache(app, db, monkeypatch): connector.save() class TestCache(object): - def __init__(self): self.d = dict() self.get_calls = 0 @@ -587,6 +585,7 @@ def test_endpoint_cache(app, db, monkeypatch): cache = TestCache() import passerelle.views + monkeypatch.setattr(passerelle.views, 'cache', cache) resp1 = app.get('/stub-invoices/fake/randominvoice/url_param_value/?get_param=get_param_value') @@ -600,7 +599,8 @@ def test_endpoint_cache(app, db, monkeypatch): resp3 = app.get( '/stub-invoices/fake/randominvoice/url_param_value/?get_param=get_param_value' - '&apikey=somekey×tamp=somestamp') + '&apikey=somekey×tamp=somestamp' + ) assert cache.get_calls == 3 assert cache.set_calls == 1 assert resp1.json_body == resp3.json_body @@ -622,17 +622,13 @@ def test_endpoint_cache(app, db, monkeypatch): def test_endpoint_cookies(app, db, monkeypatch, httpbin): - @endpoint(methods=['get']) def httpcall(obj, request): response = obj.requests.get(httpbin.url + '/cookies/set?foo=bar', allow_redirects=False) cookie1 = response.request.headers.get('Cookie') response = obj.requests.get(httpbin.url + '/get') cookie2 = response.request.headers.get('Cookie') - return { - 'cookie1': cookie1, - 'cookie2': cookie2 - } + return {'cookie1': cookie1, 'cookie2': cookie2} monkeypatch.setattr(StubInvoicesConnector, 
'httpcall', httpcall, raising=False) @@ -663,19 +659,20 @@ def test_https_warnings(app, db, monkeypatch, httpbin_secure, relax_openssl): def test_endpoint_typed_params(app, db, monkeypatch): - - @endpoint(methods=['get'], - parameters={ - 'boolean': { - 'type': 'bool', - }, - 'integer': { - 'type': 'int', - }, - 'floating': { - 'type': 'float', - }, - }) + @endpoint( + methods=['get'], + parameters={ + 'boolean': { + 'type': 'bool', + }, + 'integer': { + 'type': 'int', + }, + 'floating': { + 'type': 'float', + }, + }, + ) def httpcall(obj, request, boolean=False, integer=1, floating=1.1): return {'boolean': boolean, 'integer': integer, 'floating': floating} @@ -722,24 +719,38 @@ def test_endpoint_typed_params(app, db, monkeypatch): def test_endpoint_params_type_detection(app, db, monkeypatch): - - @endpoint(methods=['get'], - parameters={ - 'bool_by_example': { - 'example_value': True, - }, - 'int_by_example': { - 'example_value': 1, - }, - 'float_by_example': { - 'example_value': 1.1, - }, - }) - def httpcall(obj, request, boolean=False, integer=1, floating=1.1, - bool_by_example=None, int_by_example=None, float_by_example=None): - return {'boolean': boolean, 'integer': integer, 'floating': floating, - 'bool_by_example': bool_by_example, 'int_by_example': int_by_example, - 'float_by_example': float_by_example} + @endpoint( + methods=['get'], + parameters={ + 'bool_by_example': { + 'example_value': True, + }, + 'int_by_example': { + 'example_value': 1, + }, + 'float_by_example': { + 'example_value': 1.1, + }, + }, + ) + def httpcall( + obj, + request, + boolean=False, + integer=1, + floating=1.1, + bool_by_example=None, + int_by_example=None, + float_by_example=None, + ): + return { + 'boolean': boolean, + 'integer': integer, + 'floating': floating, + 'bool_by_example': bool_by_example, + 'int_by_example': int_by_example, + 'float_by_example': float_by_example, + } monkeypatch.setattr(StubInvoicesConnector, 'httpcall', httpcall, raising=False) @@ -805,43 +816,56 @@ 
class DummyConnectorWithoutCheckStatus(DummyConnectorBase): pass -@pytest.mark.parametrize('connector_class, expected_status, expected_response', [ - (DummyConnectorWithCheckStatus, 200, {'err': 0}), - (DummyConnectorWithCheckStatusFailure, 200, - {'err_class': 'passerelle.utils.jsonresponse.APIError', - 'err_desc': 'service not available', 'data': None, 'err': 1}), - (DummyConnectorWithoutCheckStatus, 404, None), -]) +@pytest.mark.parametrize( + 'connector_class, expected_status, expected_response', + [ + (DummyConnectorWithCheckStatus, 200, {'err': 0}), + ( + DummyConnectorWithCheckStatusFailure, + 200, + { + 'err_class': 'passerelle.utils.jsonresponse.APIError', + 'err_desc': 'service not available', + 'data': None, + 'err': 1, + }, + ), + (DummyConnectorWithoutCheckStatus, 404, None), + ], +) def test_generic_up_endpoint(db, app, connector_class, expected_status, expected_response): connector = connector_class() connector.id = 42 - url = reverse('generic-endpoint', kwargs={ - 'connector': 'foo', - 'slug': 'foo', - 'endpoint': 'up', - }) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'foo', + 'slug': 'foo', + 'endpoint': 'up', + }, + ) patch_init = mock.patch('passerelle.views.GenericConnectorMixin.init_stuff') - patch_object = mock.patch('passerelle.views.GenericEndpointView.get_object', - return_value=connector) + patch_object = mock.patch('passerelle.views.GenericEndpointView.get_object', return_value=connector) with patch_init, patch_object: response = app.get(url, status=expected_status) if expected_response is not None: assert response.json == expected_response -@pytest.mark.parametrize('connector_class, expected', [ - (DummyConnectorWithCheckStatus, True), - (DummyConnectorWithCheckStatusFailure, True), - (DummyConnectorWithoutCheckStatus, False), -]) +@pytest.mark.parametrize( + 'connector_class, expected', + [ + (DummyConnectorWithCheckStatus, True), + (DummyConnectorWithCheckStatusFailure, True), + (DummyConnectorWithoutCheckStatus, 
False), + ], +) def test_generic_up_in_endpoints_infos(db, app, connector_class, expected): connector = connector_class() connector.id = 42 - up_endpoints = [ - ep for ep in connector.get_endpoints_infos() - if ep.name == 'up'] + up_endpoints = [ep for ep in connector.get_endpoints_infos() if ep.name == 'up'] if expected: assert len(up_endpoints) == 1 else: @@ -921,16 +945,21 @@ class DummyConnectorWithOrderingAndCategory(DummyConnectorBase): pass -@pytest.mark.parametrize('connector_class, expected_ordering', [ - (DummyConnectorWithoutOrdering, ['a', 'b', 'caa', 'cbb']), - (DummyConnectorWithOrdering, ['caa', 'cbb', 'a', 'b']), - (DummyConnectorWithOrderingAndCategory, ['a', 'd', 'b', 'e', 'c']), -]) +@pytest.mark.parametrize( + 'connector_class, expected_ordering', + [ + (DummyConnectorWithoutOrdering, ['a', 'b', 'caa', 'cbb']), + (DummyConnectorWithOrdering, ['caa', 'cbb', 'a', 'b']), + (DummyConnectorWithOrderingAndCategory, ['a', 'd', 'b', 'e', 'c']), + ], +) def test_generic_up_in_endpoints_ordering(db, app, connector_class, expected_ordering): connector = connector_class() connector.id = 42 - assert ['%s%s' % (ep.name, ep.pattern or '') for ep in connector.get_endpoints_infos()] == expected_ordering + assert [ + '%s%s' % (ep.name, ep.pattern or '') for ep in connector.get_endpoints_infos() + ] == expected_ordering def test_response_schema(db, app): @@ -945,8 +974,7 @@ def test_view_connector(db, app, monkeypatch, admin_user): connector.slug = 'foo' monkeypatch.setattr('passerelle.views.GenericConnectorView.model', DummyConnectorWithoutOrdering) patch_init = mock.patch('passerelle.views.GenericConnectorView.init_stuff') - patch_object = mock.patch('passerelle.views.GenericConnectorView.get_object', - return_value=connector) + patch_object = mock.patch('passerelle.views.GenericConnectorView.get_object', return_value=connector) # check description is hidden when unlogged url = reverse('view-connector', kwargs={'connector': 'dummy', 'slug': 'foo'}) diff --git 
a/tests/test_gesbac.py b/tests/test_gesbac.py index 55a2d033..bf299f81 100644 --- a/tests/test_gesbac.py +++ b/tests/test_gesbac.py @@ -34,17 +34,15 @@ def resource(db, sftpserver): slug='test', title='Gesbac', description='gesbac', - outcoming_sftp=SFTP( - 'sftp://foo:bar@{server.host}:{server.port}/output/'.format(server=sftpserver)), - incoming_sftp=SFTP( - 'sftp://foo:bar@{server.host}:{server.port}/input/'.format(server=sftpserver)), + outcoming_sftp=SFTP('sftp://foo:bar@{server.host}:{server.port}/output/'.format(server=sftpserver)), + incoming_sftp=SFTP('sftp://foo:bar@{server.host}:{server.port}/input/'.format(server=sftpserver)), output_files_prefix='output-', - input_files_prefix='input-') + input_files_prefix='input-', + ) def test_check_status(app, resource, sftpserver): - with sftpserver.serve_content({'input': {'test': 'content'}, - 'output': {'file': 'content'}}): + with sftpserver.serve_content({'input': {'test': 'content'}, 'output': {'file': 'content'}}): resource.check_status() @@ -72,15 +70,17 @@ def test_create_demand(app, resource, freezer, sftpserver): 'card_subject': 1, 'card_demand_reason': 1, 'card_demand_purpose': 1, - 'cards_quantity': 1 + 'cards_quantity': 1, } response = app.post_json('/gesbac/test/create-demand/', params=payload) assert resource.form_set.filter(status='new').count() == 1 form = resource.form_set.get(status='new') assert len(form.demand_data) == 2 - expected_filename = '%s%s-%s.csv' % (resource.output_files_prefix, - timestamp.strftime('%y%m%d-%H%M%S'), - form.get_gesbac_id()) + expected_filename = '%s%s-%s.csv' % ( + resource.output_files_prefix, + timestamp.strftime('%y%m%d-%H%M%S'), + form.get_gesbac_id(), + ) assert response.json['data']['filename'] == expected_filename assert response.json['data']['gesbac_id'] == '4204200' assert resource.form_set.filter(status='new').count() == 1 @@ -93,9 +93,11 @@ def test_create_demand(app, resource, freezer, sftpserver): assert resource.form_set.filter(status='new').count() == 
1 form = resource.form_set.get(status='new') assert len(form.demand_data) == 2 - expected_filename = '%s%s-%s.csv' % (resource.output_files_prefix, - timestamp.strftime('%y%m%d-%H%M%S'), - form.get_gesbac_id()) + expected_filename = '%s%s-%s.csv' % ( + resource.output_files_prefix, + timestamp.strftime('%y%m%d-%H%M%S'), + form.get_gesbac_id(), + ) assert response.json['data']['filename'] == expected_filename assert response.json['data']['gesbac_id'] == '4204201' with sftpserver.serve_content({'output': {response.json['data']['filename']: 'content'}}): @@ -126,7 +128,7 @@ def test_demand_creation_limit(app, resource, freezer): 'card_subject': 1, 'card_demand_reason': 1, 'card_demand_purpose': 1, - 'cards_quantity': 1 + 'cards_quantity': 1, } for count in range(20): response = app.post_json('/gesbac/test/create-demand/', params=payload) @@ -137,8 +139,7 @@ def test_demand_creation_limit(app, resource, freezer): def test_get_demand_response(app, resource, freezer, sftpserver): - response = app.get('/gesbac/test/get-response/', - params={'gesbac_id': '42043'}, status=404) + response = app.get('/gesbac/test/get-response/', params={'gesbac_id': '42043'}, status=404) timestamp = now() payload = { 'form_id': '42-43', @@ -161,22 +162,19 @@ def test_get_demand_response(app, resource, freezer, sftpserver): 'card_subject': 1, 'card_demand_reason': 1, 'card_demand_purpose': 1, - 'cards_quantity': 1 + 'cards_quantity': 1, } response = app.post_json('/gesbac/test/create-demand/', params=payload) data = response.json['data'] - with sftpserver.serve_content({'output': {data['filename']: 'content'}, - 'input': {}}): + with sftpserver.serve_content({'output': {data['filename']: 'content'}, 'input': {}}): resource.jobs() assert resource.form_set.filter(status='closed').count() == 0 assert resource.form_set.filter(status='sent').count() == 1 data = resource.form_set.get(status='sent') gesbac_id = data.get_gesbac_id() - response_filename = '%s91001-090300-%s.csv' % ( - 
resource.input_files_prefix, gesbac_id) - assert resource.form_set.filter( - form_id='42-43', filename=response_filename, status='closed').count() == 0 + response_filename = '%s91001-090300-%s.csv' % (resource.input_files_prefix, gesbac_id) + assert resource.form_set.filter(form_id='42-43', filename=response_filename, status='closed').count() == 0 # files are encoded in latin-1 comment = force_text('propriétaire') @@ -184,14 +182,24 @@ def test_get_demand_response(app, resource, freezer, sftpserver): with sftpserver.serve_content({'input': {response_filename: content}}): resource.hourly() - assert resource.form_set.filter( - form_id='42-43', status='closed').count() == 1 + assert resource.form_set.filter(form_id='42-43', status='closed').count() == 1 response = resource.form_set.get(status='closed') - assert response.card_data == ['CARTE', gesbac_id, '3', '2', '1234', '', '', - '', '', '', '', '', comment] + assert response.card_data == ['CARTE', gesbac_id, '3', '2', '1234', '', '', '', '', '', '', '', comment] - response = app.get('/gesbac/test/get-response/', - params={'gesbac_id': gesbac_id}) + response = app.get('/gesbac/test/get-response/', params={'gesbac_id': gesbac_id}) assert response.json['err'] == 0 - assert response.json['data'] == ['CARTE', gesbac_id, '3', '2', '1234', '', - '', '', '', '', '', '', comment] + assert response.json['data'] == [ + 'CARTE', + gesbac_id, + '3', + '2', + '1234', + '', + '', + '', + '', + '', + '', + '', + comment, + ] diff --git a/tests/test_greco.py b/tests/test_greco.py index 45d32386..1892a1fd 100644 --- a/tests/test_greco.py +++ b/tests/test_greco.py @@ -19,34 +19,33 @@ WSDL_FILENAME = os.path.join(os.path.dirname(__file__), 'data', 'greco.wsdl') @pytest.fixture def conn(): api_user = ApiUser.objects.create(username='greco', keytype='API', key='grecokey') - connector = Greco.objects.create(title='Greco', slug='greco', - application='appid', - token_url='http://greco.example.net/token', - token_authorization='authtoken', 
- wsdl_url='file://%s' % WSDL_FILENAME, - verify_cert=False) + connector = Greco.objects.create( + title='Greco', + slug='greco', + application='appid', + token_url='http://greco.example.net/token', + token_authorization='authtoken', + wsdl_url='file://%s' % WSDL_FILENAME, + verify_cert=False, + ) obj_type = ContentType.objects.get_for_model(Greco) - AccessRight.objects.create(codename='can_access', - apiuser=api_user, resource_type=obj_type, - resource_pk=connector.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api_user, resource_type=obj_type, resource_pk=connector.pk + ) return connector def post_effect_503(url, *args, **kwargs): if url == 'http://greco.example.net/token': return utils.FakedResponse( - content='{"token_type": "tt", "access_token": "at", "expires_in": "600"}', - status_code=200) + content='{"token_type": "tt", "access_token": "at", "expires_in": "600"}', status_code=200 + ) return utils.FakedResponse(content='Service Unavailable', status_code=503) @mock.patch('passerelle.utils.Request.post', side_effect=post_effect_503) def test_greco_ping(mocked_post, app, conn): - url = reverse('generic-endpoint', kwargs={ - 'connector': 'greco', - 'endpoint': 'ping', - 'slug': conn.slug - }) + url = reverse('generic-endpoint', kwargs={'connector': 'greco', 'endpoint': 'ping', 'slug': conn.slug}) app.get(url, status=403) assert mocked_post.call_count == 0 diff --git a/tests/test_grenoble_gru.py b/tests/test_grenoble_gru.py index 8378a5e2..60f9835f 100644 --- a/tests/test_grenoble_gru.py +++ b/tests/test_grenoble_gru.py @@ -31,9 +31,8 @@ from passerelle.contrib.grenoble_gru.models import GrenobleGRU @pytest.fixture def setup(db): return utils.setup_access_rights( - GrenobleGRU.objects.create( - slug='test', - base_url='https://itvvoirie-test.lametro.fr')) + GrenobleGRU.objects.create(slug='test', base_url='https://itvvoirie-test.lametro.fr') + ) BASIC_PAYLOAD = { @@ -55,7 +54,7 @@ BASIC_PAYLOAD = { "urgent_demand": '1', "dysfonction_type": 
'3', "intervention_reason": '2', - 'dem_pav': 'déchetterie' + 'dem_pav': 'déchetterie', } DEMAND = """ @@ -133,11 +132,7 @@ def test_contact_mode_typologies_list_with_invalid_xml(app, setup): request_post.return_value = response endpoint = reverse( 'generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'contact-modes' - } + kwargs={'connector': 'grenoble-gru', 'slug': setup.slug, 'endpoint': 'contact-modes'}, ) response = app.get(endpoint) request_post.assert_called_once() @@ -154,11 +149,7 @@ def test_contact_mode_typologies_list(app, setup): request_post.return_value = response endpoint = reverse( 'generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'contact-modes' - } + kwargs={'connector': 'grenoble-gru', 'slug': setup.slug, 'endpoint': 'contact-modes'}, ) response = app.get(endpoint) # check that data is in cache @@ -190,14 +181,13 @@ def test_demand_creation_return_codes(app, setup, remote_response): with mock.patch('passerelle.utils.Request.post') as request_post: request_post.side_effect = [get_typo_response(), remote_response] response = app.post_json( - reverse('generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'create-demand' - }), + reverse( + 'generic-endpoint', + kwargs={'connector': 'grenoble-gru', 'slug': setup.slug, 'endpoint': 'create-demand'}, + ), params=BASIC_PAYLOAD, - status=200) + status=200, + ) index = remote_response.index if index == 0: assert response.json['data'] == 'Demand successfully created' @@ -221,13 +211,12 @@ def test_demand_creation_with_invalid_params(app, setup): invalid_payload = BASIC_PAYLOAD.copy() invalid_payload['applicant_status'] = 42 response = app.post_json( - reverse('generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'create-demand' - }), - params=invalid_payload) + reverse( + 'generic-endpoint', + kwargs={'connector': 'grenoble-gru', 
'slug': setup.slug, 'endpoint': 'create-demand'}, + ), + params=invalid_payload, + ) assert response.json['err'] == 1 assert 'applicant_status must be one of' in response.json['err_desc'] assert response.json['data'] is None @@ -240,13 +229,12 @@ def test_demand_creation_params(app, setup): payload['intervention_free_address'] = u'169, rue du Château' payload['applicant_free_address'] = u'1, rue de l\'Est' app.post_json( - reverse('generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'create-demand' - }), - params=payload) + reverse( + 'generic-endpoint', + kwargs={'connector': 'grenoble-gru', 'slug': setup.slug, 'endpoint': 'create-demand'}, + ), + params=payload, + ) assert request_post.call_args[1]['data']['int_adresse_manuelle'] == u'169, rue du Château' assert request_post.call_args[1]['data']['dem_adresse_manuelle'] == u'1, rue de l\'Est' assert request_post.call_args[1]['data']['obs_demande_urgente'] == 1 @@ -260,26 +248,23 @@ def test_demand_creation_params(app, setup): app.post_json( reverse( 'generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'create-demand' - }), + kwargs={'connector': 'grenoble-gru', 'slug': setup.slug, 'endpoint': 'create-demand'}, + ), params=payload, - status=200) + status=200, + ) assert request_post.call_args[1]['data']['obs_demande_urgente'] == 1 assert request_post.call_args[1]['data']['dem_reponse'] == 1 # make sure empty dem_pav value is not sent payload['dem_pav'] = '' app.post_json( - reverse('generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'create-demand' - }), - params=payload) + reverse( + 'generic-endpoint', + kwargs={'connector': 'grenoble-gru', 'slug': setup.slug, 'endpoint': 'create-demand'}, + ), + params=payload, + ) assert 'dem_pav' not in request_post.call_args[1]['data'] @@ -294,18 +279,18 @@ def test_add_attachment_to_demand(app, setup): 'connector': 'grenoble-gru', 'slug': 
setup.slug, 'endpoint': 'demand', - 'rest': '42-42/add-attachment/' - }) + 'rest': '42-42/add-attachment/', + }, + ) payload = {'file': 'test.jpg'} response = app.post_json(endpoint, params=payload, status=200) assert response.json['data'] is None assert response.json['err'] == 1 assert response.json['err_desc'] == 'file should be a dict' - file_b64_content = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAUDBAQEAwUEBAQFBQUGBwwIBwcHBw8LCwkMEQ8SEhEPERETFh' - payload['file'] = { - 'content_type': 'image/jpg', - 'filename': 'photo.jpg' - } + file_b64_content = ( + '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAUDBAQEAwUEBAQFBQUGBwwIBwcHBw8LCwkMEQ8SEhEPERETFh' + ) + payload['file'] = {'content_type': 'image/jpg', 'filename': 'photo.jpg'} response = app.post_json(endpoint, params=payload, status=400) assert response.json['data'] is None assert response.json['err'] == 1 @@ -315,8 +300,10 @@ def test_add_attachment_to_demand(app, setup): assert request_post.call_args[1]['data']['dem_tiers_id'] == '42-42' assert 'filename=photo.jpg' in request_post.call_args[1]['data']['piece_jointe'] assert 'filetype=image%2Fjpg' in request_post.call_args[1]['data']['piece_jointe'] - assert ('filecontent=%2F9j%2F4AAQSkZJRgABAQAAAQABAAD%2F2wBDAAUDBAQEAwUEBAQFBQUGBwwIBwcHBw8LCwkMEQ8SEhEPERETFh' - in request_post.call_args[1]['data']['piece_jointe']) + assert ( + 'filecontent=%2F9j%2F4AAQSkZJRgABAQAAAQABAAD%2F2wBDAAUDBAQEAwUEBAQFBQUGBwwIBwcHBw8LCwkMEQ8SEhEPERETFh' + in request_post.call_args[1]['data']['piece_jointe'] + ) def test_get_demand(app, setup): @@ -325,13 +312,15 @@ def test_get_demand(app, setup): response.content = DEMAND.replace('\n', '') request_post.return_value = response response = app.get( - reverse('generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'demand', - 'rest': '42-42/' - }) + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'grenoble-gru', + 'slug': setup.slug, + 'endpoint': 'demand', + 'rest': '42-42/', + }, + ) ) assert 
request_post.call_args[1]['data']['dem_tiers_id'] == '42-42' demand = response.json['data'] @@ -347,13 +336,15 @@ def test_get_demand_with_invalid_xml(app, setup): response.content = DEMAND.replace('\n', '').replace('de Grenoble', '& Grenoble') request_post.return_value = response response = app.get( - reverse('generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'demand', - 'rest': '42-42/' - }) + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'grenoble-gru', + 'slug': setup.slug, + 'endpoint': 'demand', + 'rest': '42-42/', + }, + ) ) assert request_post.call_args[1]['data']['dem_tiers_id'] == '42-42' assert response.json['err'] @@ -365,16 +356,16 @@ def test_get_pavs(app, setup): with mock.patch('passerelle.utils.Request.post') as request_post: response = mock.Mock() json_response = mock.Mock() - json_response.return_value = json.load(open(os.path.join(os.path.dirname(__file__), 'data', 'grenoble_gru_pavs.json'))) + json_response.return_value = json.load( + open(os.path.join(os.path.dirname(__file__), 'data', 'grenoble_gru_pavs.json')) + ) response.json = json_response request_post.return_value = response response = app.get( - reverse('generic-endpoint', - kwargs={ - 'connector': 'grenoble-gru', - 'slug': setup.slug, - 'endpoint': 'pavs' - }) + reverse( + 'generic-endpoint', + kwargs={'connector': 'grenoble-gru', 'slug': setup.slug, 'endpoint': 'pavs'}, + ) ) assert response.json['err'] == 0 for item in response.json['data']: diff --git a/tests/test_import_export.py b/tests/test_import_export.py index af609d76..45662593 100644 --- a/tests/test_import_export.py +++ b/tests/test_import_export.py @@ -56,22 +56,30 @@ def get_file_content(filename): @pytest.fixture def setup(): - - def maker(columns_keynames='fam,id,lname,fname,sex', filename='data.csv', sheet_name='Feuille2', - data=b''): + def maker( + columns_keynames='fam,id,lname,fname,sex', filename='data.csv', sheet_name='Feuille2', data=b'' + ): api = 
ApiUser.objects.create(username='all', keytype='', key='') - csv = CsvDataSource.objects.create(csv_file=File(BytesIO(data), filename), - sheet_name=sheet_name, columns_keynames=columns_keynames, - slug='test', title='a title', - description='a description') + csv = CsvDataSource.objects.create( + csv_file=File(BytesIO(data), filename), + sheet_name=sheet_name, + columns_keynames=columns_keynames, + slug='test', + title='a title', + description='a description', + ) obj_type = ContentType.objects.get_for_model(csv) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=csv.pk) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'csvdatasource', - 'slug': csv.slug, - 'endpoint': 'data', - }) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=csv.pk + ) + url = reverse( + 'generic-endpoint', + kwargs={ + 'connector': 'csvdatasource', + 'slug': csv.slug, + 'endpoint': 'data', + }, + ) return csv, url return maker @@ -107,8 +115,7 @@ def clear(): def test_export_csvdatasource(app, setup, filetype): - csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, - data=get_file_content(filetype)) + csvdata, url = setup('id,whatever,nom,prenom,sexe', filename=filetype, data=get_file_content(filetype)) query = Query(slug='query-1_', resource=csvdata, structure='array') query.projections = '\n'.join(['id:int(id)', 'prenom:prenom']) query.save() @@ -189,6 +196,7 @@ def test_export_to_file(app, setup, filetype): import_site(json.load(open(f.name)), overwrite=True) assert Bdp.objects.count() == 1 + def test_export_log_level(app, setup): bdp = Bdp.objects.create(service_url='https://bdp.example.com/') bdp.set_log_level('DEBUG') @@ -205,8 +213,7 @@ def test_export_access_rights(app, setup): bdp = Bdp.objects.create(service_url='https://bdp.example.com/') obj_type = ContentType.objects.get_for_model(bdp) api = ApiUser.objects.create(username='all', keytype='', 
key='') - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=bdp.pk) + AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=bdp.pk) first = export_site() Bdp.objects.all().delete() AccessRight.objects.all().delete() @@ -233,7 +240,7 @@ def test_export_base_adresse(): api_geo_url='https://geo.api.gouv.fr/', zipcode='75013', latitude=0.2, - longitude=1.4 + longitude=1.4, ) ba_export = ba.export_json() new_ba = BaseAdresse.import_json(ba_export, overwrite=True) diff --git a/tests/test_iparapheur.py b/tests/test_iparapheur.py index 5b762a79..29f4a036 100644 --- a/tests/test_iparapheur.py +++ b/tests/test_iparapheur.py @@ -23,33 +23,41 @@ pytestmark = pytest.mark.django_db BASE_URL = 'https://secure-iparapheur.demonstrations.adullact.org:443/ws-iparapheur' WSDL_URL = '%s?wsdl' % BASE_URL API_KEY = 'iparapheur' -SOAP_NAMESPACES = {'soap': 'http://schemas.xmlsoap.org/soap/envelope/', - 'ns1': 'http://www.adullact.org/spring-ws/iparapheur/1.0', - 'xmlmime': 'http://www.w3.org/2005/05/xmlmime' +SOAP_NAMESPACES = { + 'soap': 'http://schemas.xmlsoap.org/soap/envelope/', + 'ns1': 'http://www.adullact.org/spring-ws/iparapheur/1.0', + 'xmlmime': 'http://www.w3.org/2005/05/xmlmime', } + @pytest.fixture def conn(): - api = ApiUser.objects.create(username='iparapheur', - keytype='API', - key=API_KEY) - conn = IParapheur.objects.create(title='parapheur', slug='parapheur', - wsdl_url=WSDL_URL, basic_auth_username='test', - basic_auth_password='secret') + api = ApiUser.objects.create(username='iparapheur', keytype='API', key=API_KEY) + conn = IParapheur.objects.create( + title='parapheur', + slug='parapheur', + wsdl_url=WSDL_URL, + basic_auth_username='test', + basic_auth_password='secret', + ) obj_type = ContentType.objects.get_for_model(IParapheur) - AccessRight.objects.create(codename='can_access', - apiuser=api, resource_type=obj_type, - resource_pk=conn.pk) + 
AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=conn.pk + ) return conn + def assert_invalid_xml(err_desc): assert "Server returned response (200) with invalid XML" in err_desc + def xmlmime(): - return os.path.join(os.path.dirname(__file__), 'data','xmlmime.xml') + return os.path.join(os.path.dirname(__file__), 'data', 'xmlmime.xml') + def wsdl_file(): - return os.path.join(os.path.dirname(__file__), 'data','iparapheur.wsdl') + return os.path.join(os.path.dirname(__file__), 'data', 'iparapheur.wsdl') + def iph_mocked_get(url, params=None, **kwargs): response = Response() @@ -70,6 +78,7 @@ def iph_mocked_get(url, params=None, **kwargs): response.status_code = 200 return response + @mock.patch('passerelle.contrib.iparapheur.models.IParapheur.get_client') def test_call_ping(soap_client, app, conn): service = mock.Mock() @@ -78,26 +87,30 @@ def test_call_ping(soap_client, app, conn): class MockedSettings(object): def __init__(self, **kwargs): pass + def __enter__(self): pass + def __exit__(self, exc_type, exc_value, traceback): pass mocked_client = mock.Mock(overridden_service=service, settings=MockedSettings) soap_client.return_value = mocked_client - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'ping', 'slug': conn.slug}) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 'ping', 'slug': conn.slug} + ) resp = app.get(url, status=403) url += '?apikey=%s' % API_KEY resp = app.get(url) assert resp.json['err'] == 0 assert resp.json['data'] == 'pong' -@mock.patch('passerelle.base.models.BaseResource.soap_client', - side_effect=ConnectionError('mocked error')) + +@mock.patch('passerelle.base.models.BaseResource.soap_client', side_effect=ConnectionError('mocked error')) def test_call_ping_connectionerror(soap_client, app, conn): - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'ping', 'slug': conn.slug}) + url = 
reverse( + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 'ping', 'slug': conn.slug} + ) resp = app.get(url, status=403) url += '?apikey=%s' % API_KEY resp = app.get(url) @@ -105,6 +118,7 @@ def test_call_ping_connectionerror(soap_client, app, conn): assert resp.json['data'] == None assert 'mocked error' in resp.json['err_desc'] + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_create_file(mocked_post, mocked_get, app, conn): @@ -117,22 +131,26 @@ def test_create_file(mocked_post, mocked_get, app, conn): response.status_code = 200 for filename in ('iparapheur_test.odt', 'iparapheur_test.pdf'): - soap_response = """OKDossier %s soumis dans le circuitINFO%s""" % (filename, file_id) + soap_response = ( + """OKDossier %s soumis dans le circuitINFO%s""" + % (filename, file_id) + ) response._content = force_bytes(soap_response) mocked_post.return_value = response title, ext = filename.split('.') base64_data = 'VGVzdCBEb2N1bWVudA==' data = { - 'type': typ, 'subtype': subtyp, 'visibility': visibility, + 'type': typ, + 'subtype': subtyp, + 'visibility': visibility, 'title': title, - 'file': { - 'content': base64_data, - 'content_type': 'application/pdf' - } + 'file': {'content': base64_data, 'content_type': 'application/pdf'}, } - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'create-file', 'slug': conn.slug}) + url = reverse( + 'generic-endpoint', + kwargs={'connector': 'iparapheur', 'endpoint': 'create-file', 'slug': conn.slug}, + ) resp = app.post_json(url, params=data, status=403) url += '?apikey=%s' % API_KEY resp = app.post_json(url, params=data) @@ -150,22 +168,23 @@ def test_create_file(mocked_post, mocked_get, app, conn): # Missing dossier_id in response title = 'foo' - soap_response = """OKDossier %s soumis dans le circuitINFO""" % title + soap_response = ( + """OKDossier %s soumis dans le circuitINFO""" + % title + ) 
response._content = force_bytes(soap_response) mocked_post.return_value = response base64_data = 'VGVzdCBEb2N1bWVudA==' data = { - 'type': typ, 'subtype': subtyp, 'visibility': visibility, + 'type': typ, + 'subtype': subtyp, + 'visibility': visibility, 'title': title, - 'file': { - 'content': base64_data, - 'content_type': 'application/pdf' - } + 'file': {'content': base64_data, 'content_type': 'application/pdf'}, } url = reverse( - 'generic-endpoint', - kwargs={'connector': 'iparapheur', 'endpoint': 'create-file', 'slug': conn.slug} + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 'create-file', 'slug': conn.slug} ) resp = app.post_json(url, params=data, status=403) url += '?apikey=%s' % API_KEY @@ -205,8 +224,9 @@ def test_create_file(mocked_post, mocked_get, app, conn): # Unknown value for "visibility" err_data = data.copy() err_data['visibility'] = 'UNKNOWN_VISIBILITY' - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'create-file', 'slug': conn.slug}) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 'create-file', 'slug': conn.slug} + ) url += '?apikey=%s' % API_KEY resp = app.post_json(url, params=err_data) assert resp.json['err'] == 1 @@ -214,8 +234,9 @@ def test_create_file(mocked_post, mocked_get, app, conn): # OK, providing email data['email'] = email - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'create-file', 'slug': conn.slug}) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 'create-file', 'slug': conn.slug} + ) url += '?apikey=%s' % API_KEY soap_response = """""" response._content = force_bytes(soap_response) @@ -226,6 +247,7 @@ def test_create_file(mocked_post, mocked_get, app, conn): assert 'Transport Error' in resp.json['err_desc'] assert_invalid_xml(resp.json['err_desc']) + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) 
@mock.patch('passerelle.utils.Request.post') def test_files(mocked_post, mocked_get, app, conn): @@ -235,8 +257,9 @@ def test_files(mocked_post, mocked_get, app, conn): """ response._content = force_bytes(soap_response) mocked_post.return_value = response - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'files', 'slug': conn.slug}) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 'files', 'slug': conn.slug} + ) resp = app.get(url, status=403) url += '?apikey=%s' % API_KEY resp = app.get(url) @@ -258,6 +281,7 @@ def test_files(mocked_post, mocked_get, app, conn): assert item['id'] assert item['timestamp'] + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_get_file_status(mocked_post, mocked_get, app, conn): @@ -269,9 +293,10 @@ def test_get_file_status(mocked_post, mocked_get, app, conn): """ response._content = force_bytes(soap_response) mocked_post.return_value = response - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'get-file-status', 'slug': conn.slug, - 'rest': file_id}) + url = reverse( + 'generic-endpoint', + kwargs={'connector': 'iparapheur', 'endpoint': 'get-file-status', 'slug': conn.slug, 'rest': file_id}, + ) resp = app.get(url, status=403) url += '?apikey=%s' % API_KEY resp = app.get(url) @@ -279,12 +304,13 @@ def test_get_file_status(mocked_post, mocked_get, app, conn): data = resp.json['data'] assert data['status'] == 'Archive' assert data['nom'] == 'webservices gru' - expected_ts = timezone.make_naive(parser.parse('2016-04-05T17:58:46.727+02:00'), - timezone.get_current_timezone()) + expected_ts = timezone.make_naive( + parser.parse('2016-04-05T17:58:46.727+02:00'), timezone.get_current_timezone() + ) assert data['timestamp'] in ( - expected_ts.strftime('%Y-%m-%dT%H:%M:%S.000'), # suds 0.4 - '2016-04-05T17:58:46.727+02:00', # suds 0.7 - ) + 
expected_ts.strftime('%Y-%m-%dT%H:%M:%S.000'), # suds 0.4 + '2016-04-05T17:58:46.727+02:00', # suds 0.7 + ) # KO soap_response = """KOKOmessageINFO""" @@ -313,6 +339,7 @@ def test_get_file_status(mocked_post, mocked_get, app, conn): assert 'passerelle.utils.jsonresponse.APIError' in resp.json['err_class'] assert_invalid_xml(resp.json['err_desc']) + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_get_file(mocked_post, mocked_get, app, conn): @@ -320,14 +347,16 @@ def test_get_file(mocked_post, mocked_get, app, conn): response = Response() response.status_code = 200 - soap_response = open(os.path.join(os.path.dirname(__file__), - 'data/iparapheur_get_file_response.xml'), 'rb').read() + soap_response = open( + os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb' + ).read() response._content = force_bytes(soap_response) mocked_post.return_value = response - url = reverse('generic-endpoint', kwargs={'slug': conn.slug, - 'connector': 'iparapheur', 'endpoint': 'get-file', - 'rest': file_id}) + url = reverse( + 'generic-endpoint', + kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'get-file', 'rest': file_id}, + ) resp = app.get(url, status=403) url += '?apikey=%s' % API_KEY resp = app.get(url) @@ -362,6 +391,7 @@ def test_get_file(mocked_post, mocked_get, app, conn): assert 'passerelle.utils.jsonresponse.APIError' in resp.json['err_class'] assert 'XMLSyntax Error' in resp.json['err_desc'] + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_get_file_invalid_appendix(mocked_post, mocked_get, app, conn): @@ -369,19 +399,22 @@ def test_get_file_invalid_appendix(mocked_post, mocked_get, app, conn): response = Response() response.status_code = 200 - soap_response = open(os.path.join(os.path.dirname(__file__), - 'data/iparapheur_get_file_response.xml'), 'rb').read() + 
soap_response = open( + os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb' + ).read() response._content = soap_response mocked_post.return_value = response - url = reverse('generic-endpoint', kwargs={'slug': conn.slug, - 'connector': 'iparapheur', 'endpoint': 'get-file', - 'rest': file_id}) + url = reverse( + 'generic-endpoint', + kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'get-file', 'rest': file_id}, + ) url += '?apikey=%s&appendix=invalid' % API_KEY resp = app.get(url, status=404) assert resp.json['err'] == 1 assert resp.json['err_desc'] == 'invalid appendix index' + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_get_file_not_found_appendix(mocked_post, mocked_get, app, conn): @@ -389,19 +422,22 @@ def test_get_file_not_found_appendix(mocked_post, mocked_get, app, conn): response = Response() response.status_code = 200 - soap_response = open(os.path.join(os.path.dirname(__file__), - 'data/iparapheur_get_file_response.xml'), 'rb').read() + soap_response = open( + os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb' + ).read() response._content = soap_response mocked_post.return_value = response - url = reverse('generic-endpoint', kwargs={'slug': conn.slug, - 'connector': 'iparapheur', 'endpoint': 'get-file', - 'rest': file_id}) + url = reverse( + 'generic-endpoint', + kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'get-file', 'rest': file_id}, + ) url += '?apikey=%s&appendix=10' % API_KEY resp = app.get(url, status=404) assert resp.json['err'] == 1 assert resp.json['err_desc'] == 'unknown appendix' + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_get_file_appendix(mocked_post, mocked_get, app, conn): @@ -409,14 +445,16 @@ def test_get_file_appendix(mocked_post, mocked_get, app, conn): response = 
Response() response.status_code = 200 - soap_response = open(os.path.join(os.path.dirname(__file__), - 'data/iparapheur_get_file_response.xml'), 'rb').read() + soap_response = open( + os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb' + ).read() response._content = soap_response mocked_post.return_value = response - url = reverse('generic-endpoint', kwargs={'slug': conn.slug, - 'connector': 'iparapheur', 'endpoint': 'get-file', - 'rest': file_id}) + url = reverse( + 'generic-endpoint', + kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'get-file', 'rest': file_id}, + ) url += '?apikey=%s&appendix=0' % API_KEY resp = app.get(url) assert resp.headers['Content-Type'] == 'application/pdf' @@ -424,6 +462,7 @@ def test_get_file_appendix(mocked_post, mocked_get, app, conn): assert 'filename*=UTF-8\'\'iParapheur_impression_dossier.pdf' in resp.headers['Content-Disposition'] assert resp.text == 'Test Document Appendix' + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_invalid_response(mocked_post, mocked_get, app, conn): @@ -431,26 +470,32 @@ def test_invalid_response(mocked_post, mocked_get, app, conn): response = Response() response.status_code = 502 - soap_response = open(os.path.join(os.path.dirname(__file__), - 'data/iparapheur_get_file_response.xml'), 'rb').read() + soap_response = open( + os.path.join(os.path.dirname(__file__), 'data/iparapheur_get_file_response.xml'), 'rb' + ).read() response._content = '

      Bad Gateway

      ' response.raison = 'Bad Gateway' mocked_post.return_value = response - url = reverse('generic-endpoint', kwargs={'slug': conn.slug, - 'connector': 'iparapheur', 'endpoint': 'get-file-status', 'rest': file_id}) + url = reverse( + 'generic-endpoint', + kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'get-file-status', 'rest': file_id}, + ) url += '?apikey=%s' % API_KEY resp = app.get(url) assert resp.json['err_desc'] == "ServiceError: Unknown fault occured" + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_webfault_response(mocked_post, mocked_get, app, conn): file_id = str(uuid.uuid4()) response = Response() response.status_code = 200 - url = reverse('generic-endpoint', kwargs={'slug': conn.slug, - 'connector': 'iparapheur', 'endpoint': 'get-file-status', 'rest': file_id}) + url = reverse( + 'generic-endpoint', + kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'get-file-status', 'rest': file_id}, + ) url += '?apikey=%s' % API_KEY webfault_response = """ @@ -475,22 +520,25 @@ def test_webfault_response(mocked_post, mocked_get, app, conn): @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) def test_call_wsdl(mocked_get, app, conn): - url = reverse('generic-endpoint', - kwargs={'slug': conn.slug, - 'connector': 'iparapheur', 'endpoint': 'wsdl'}) + url = reverse( + 'generic-endpoint', kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'wsdl'} + ) resp = app.get(url) assert resp.headers['content-type'] == 'text/xml' assert resp.content == open(wsdl_file(), 'rb').read() + @mock.patch('passerelle.utils.Request.get', side_effect=ConnectionError('mocked error')) def test_call_wsdl_connectionerror(mocked_get, app, conn): - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'wsdl', 'slug': conn.slug}) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 
'wsdl', 'slug': conn.slug} + ) resp = app.get(url) assert resp.json['err'] == 1 assert resp.json['data'] is None assert 'mocked error' in resp.json['err_desc'] + @mock.patch('passerelle.utils.Request.get') @mock.patch('zeep.Transport._load_remote_data') @mock.patch('passerelle.utils.Request.post') @@ -505,13 +553,16 @@ def test_no_auth_on_wsdl_imports(mocked_post, mocked_load, mocked_get, app, conn response_xmlmime, response_post = Response(), Response() response_xmlmime.status_code, response_post.status_code = 200, 200 response_xmlmime._content = open(xmlmime(), 'rb').read() - response_post._content = force_bytes("""[publik_test] m'a dit: "ping"! -""") + response_post._content = force_bytes( + """[publik_test] m'a dit: "ping"! +""" + ) mocked_load.return_value = open(wsdl_file(), 'rb').read() mocked_get.return_value = response_xmlmime mocked_post.return_value = response_post - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'ping', 'slug': conn.slug}) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 'ping', 'slug': conn.slug} + ) resp = app.get(url, status=403) url += '?apikey=%s' % API_KEY @@ -519,6 +570,7 @@ def test_no_auth_on_wsdl_imports(mocked_post, mocked_load, mocked_get, app, conn assert resp.json['err'] == 0 assert resp.json['data'] == "[publik_test] m'a dit: \"ping\"!" 
+ @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_types(mocked_post, mocked_get, app, conn): @@ -531,13 +583,15 @@ def test_types(mocked_post, mocked_get, app, conn): response._content = force_bytes(soap_response) mocked_post.return_value = response - url = reverse('generic-endpoint', kwargs={'slug': conn.slug, 'connector': 'iparapheur', - 'endpoint': 'types'}) + url = reverse( + 'generic-endpoint', kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'types'} + ) url += '?apikey=%s' % API_KEY resp = app.get(url) assert resp.json['err'] == 0 assert resp.json['data'] == [] + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_subtypes(mocked_post, mocked_get, app, conn): @@ -546,13 +600,17 @@ def test_subtypes(mocked_post, mocked_get, app, conn): response.status_code = 200 # error: no parameter provided - url = reverse('generic-endpoint', kwargs={'slug': conn.slug, 'connector': 'iparapheur', - 'endpoint': 'subtypes'}) + url = reverse( + 'generic-endpoint', kwargs={'slug': conn.slug, 'connector': 'iparapheur', 'endpoint': 'subtypes'} + ) resp = app.get(url, status=403) url += '?apikey=%s' % API_KEY resp = app.get(url) assert resp.json['err'] == 1 - assert resp.json['err_desc'] == "Type Error: TypeTechnique() takes exactly 1 argument (0 given). Simple types expect only a single value argument" + assert ( + resp.json['err_desc'] + == "Type Error: TypeTechnique() takes exactly 1 argument (0 given). 
Simple types expect only a single value argument" + ) # providing a type as parameter soap_response = """""" @@ -564,6 +622,7 @@ def test_subtypes(mocked_post, mocked_get, app, conn): assert resp.json['err'] == 0 assert resp.json['data'] == [] + @mock.patch('passerelle.utils.Request.get', side_effect=iph_mocked_get) @mock.patch('passerelle.utils.Request.post') def test_call_ping_overrinding_endpoint_url(mocked_post, mocked_get, app, conn): @@ -575,8 +634,9 @@ def test_call_ping_overrinding_endpoint_url(mocked_post, mocked_get, app, conn): conn.wsdl_endpoint_location = 'http://www.new.location.com' conn.save() - url = reverse('generic-endpoint', kwargs={'connector': 'iparapheur', - 'endpoint': 'ping', 'slug': conn.slug}) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'iparapheur', 'endpoint': 'ping', 'slug': conn.slug} + ) url += '?apikey=%s' % API_KEY app.get(url) assert mocked_post.call_args[0][0] == 'http://www.new.location.com' diff --git a/tests/test_isere_ens.py b/tests/test_isere_ens.py index 5aa047aa..c5b0b96c 100644 --- a/tests/test_isere_ens.py +++ b/tests/test_isere_ens.py @@ -32,9 +32,7 @@ from passerelle.utils.jsonresponse import APIError @pytest.fixture def setup(db): return utils.setup_access_rights( - IsereENS.objects.create( - slug="test", base_url="https://ens38.example.net/", token="toktok" - ) + IsereENS.objects.create(slug="test", base_url="https://ens38.example.net/", token="toktok") ) @@ -143,9 +141,7 @@ BOOK_RESPONSE_REFUSED = """{ @mock.patch("passerelle.utils.Request.get") def test_get_sites(mocked_get, app, setup): - mocked_get.return_value = utils.FakedResponse( - content=SITES_RESPONSE, status_code=200 - ) + mocked_get.return_value = utils.FakedResponse(content=SITES_RESPONSE, status_code=200) endpoint = reverse( "generic-endpoint", kwargs={"connector": "isere-ens", "slug": setup.slug, "endpoint": "sites"}, @@ -173,9 +169,7 @@ def test_get_sites(mocked_get, app, setup): response = app.get(endpoint + "?kind=social") assert 
len(response.json["data"]) == 2 - mocked_get.return_value = utils.FakedResponse( - content=SD29B_RESPONSE, status_code=200 - ) + mocked_get.return_value = utils.FakedResponse(content=SD29B_RESPONSE, status_code=200) response = app.get(endpoint + "?id=SD29b") assert mocked_get.call_args[0][0].endswith("api/1.0.0/site/SD29b") assert len(response.json["data"]) == 1 @@ -183,19 +177,14 @@ def test_get_sites(mocked_get, app, setup): assert response.json["data"][0]["dogs"] == "LEASH" # bad response for ENS API - mocked_get.return_value = utils.FakedResponse( - content=SITE_404_RESPONSE, status_code=404 - ) + mocked_get.return_value = utils.FakedResponse(content=SITE_404_RESPONSE, status_code=404) response = app.get(endpoint + "?id=SD29x") assert mocked_get.call_args[0][0].endswith("api/1.0.0/site/SD29x") assert response.json["err"] == 1 assert response.json["err_class"].endswith("APIError") assert response.json["err_desc"].startswith("error status:404") assert response.json["data"]["status_code"] == 404 - assert ( - response.json["data"]["json_content"]["message"] - == "Site not found with code SD29x" - ) + assert response.json["data"]["json_content"]["message"] == "Site not found with code SD29x" mocked_get.return_value = utils.FakedResponse(content="crash", status_code=500) response = app.get(endpoint + "?id=foo500") assert mocked_get.call_args[0][0].endswith("api/1.0.0/site/foo500") @@ -220,9 +209,7 @@ def test_get_sites(mocked_get, app, setup): @mock.patch("passerelle.utils.Request.get") def test_get_animators(mocked_get, app, setup): - mocked_get.return_value = utils.FakedResponse( - content=ANIMATORS_RESPONSE, status_code=200 - ) + mocked_get.return_value = utils.FakedResponse(content=ANIMATORS_RESPONSE, status_code=200) endpoint = reverse( "generic-endpoint", kwargs={"connector": "isere-ens", "slug": setup.slug, "endpoint": "animators"}, @@ -256,9 +243,7 @@ def test_get_animators(mocked_get, app, setup): @mock.patch("passerelle.utils.Request.get") def 
test_get_site_calendar(mocked_get, app, setup, freezer): freezer.move_to("2021-01-21 12:00:00") - mocked_get.return_value = utils.FakedResponse( - content=SITE_CALENDAR_RESPONSE, status_code=200 - ) + mocked_get.return_value = utils.FakedResponse(content=SITE_CALENDAR_RESPONSE, status_code=200) endpoint = reverse( "generic-endpoint", kwargs={ @@ -295,31 +280,23 @@ def test_get_site_calendar(mocked_get, app, setup, freezer): assert mocked_get.call_args[1]["params"]["start_date"] == "2021-01-22" assert mocked_get.call_args[1]["params"]["end_date"] == "2021-04-24" assert response.json["err"] == 0 - response = app.get( - endpoint + "?site=SD29b&start_date=2021-01-22&end_date=2021-01-30" - ) + response = app.get(endpoint + "?site=SD29b&start_date=2021-01-22&end_date=2021-01-30") assert mocked_get.call_args[1]["params"]["start_date"] == "2021-01-22" assert mocked_get.call_args[1]["params"]["end_date"] == "2021-01-30" assert response.json["err"] == 0 response = app.get(endpoint + "?site=SD29b&start_date=foo", status=400) assert response.json["err"] == 1 assert response.json["err_class"].endswith("APIError") - assert ( - response.json["err_desc"] == "bad start_date format (foo), should be YYYY-MM-DD" - ) + assert response.json["err_desc"] == "bad start_date format (foo), should be YYYY-MM-DD" response = app.get(endpoint + "?site=SD29b&end_date=bar", status=400) assert response.json["err"] == 1 assert response.json["err_class"].endswith("APIError") - assert ( - response.json["err_desc"] == "bad end_date format (bar), should be YYYY-MM-DD" - ) + assert response.json["err_desc"] == "bad end_date format (bar), should be YYYY-MM-DD" @mock.patch("passerelle.utils.Request.post") def test_post_book(mocked_post, app, setup): - mocked_post.return_value = utils.FakedResponse( - content=BOOK_RESPONSE, status_code=200 - ) + mocked_post.return_value = utils.FakedResponse(content=BOOK_RESPONSE, status_code=200) endpoint = reverse( "generic-endpoint", kwargs={ @@ -359,18 +336,14 @@ def 
test_post_book(mocked_post, app, setup): assert response.json["err"] == 0 assert response.json["data"]["status"] == "BOOKING" - mocked_post.return_value = utils.FakedResponse( - content=BOOK_RESPONSE_OVERBOOKING, status_code=200 - ) + mocked_post.return_value = utils.FakedResponse(content=BOOK_RESPONSE_OVERBOOKING, status_code=200) response = app.post_json(endpoint, params=book) assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") assert mocked_post.call_count == 2 assert response.json["err"] == 0 assert response.json["data"]["status"] == "OVERBOOKING" - mocked_post.return_value = utils.FakedResponse( - content=BOOK_RESPONSE_REFUSED, status_code=200 - ) + mocked_post.return_value = utils.FakedResponse(content=BOOK_RESPONSE_REFUSED, status_code=200) response = app.post_json(endpoint, params=book) assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") assert mocked_post.call_count == 3 @@ -379,9 +352,7 @@ def test_post_book(mocked_post, app, setup): assert response.json["err_desc"] == "booking status is REFUSED" assert response.json["data"]["status"] == "REFUSED" - mocked_post.return_value = utils.FakedResponse( - content="""["not", "a", "dict"]""", status_code=200 - ) + mocked_post.return_value = utils.FakedResponse(content="""["not", "a", "dict"]""", status_code=200) response = app.post_json(endpoint, params=book) assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") assert mocked_post.call_count == 4 @@ -390,9 +361,7 @@ def test_post_book(mocked_post, app, setup): assert response.json["err_desc"] == "response is not a dict" assert response.json["data"] == ["not", "a", "dict"] - mocked_post.return_value = utils.FakedResponse( - content="""{"foo": "bar"}""", status_code=200 - ) + mocked_post.return_value = utils.FakedResponse(content="""{"foo": "bar"}""", status_code=200) response = app.post_json(endpoint, params=book) assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") assert mocked_post.call_count == 5 diff --git 
a/tests/test_iws.py b/tests/test_iws.py index 120de2b3..45ede849 100644 --- a/tests/test_iws.py +++ b/tests/test_iws.py @@ -11,18 +11,27 @@ def setup(db): api = ApiUser.objects.create(username='all', keytype='', key='') conn = IWSConnector.objects.create( wsdl_url='http://example.com/iws?wsdl', - operation_endpoint='http://example.com/iws', username='admin', password='admin', - database='somedb', slug='slug-iws') + operation_endpoint='http://example.com/iws', + username='admin', + password='admin', + database='somedb', + slug='slug-iws', + ) obj_type = ContentType.objects.get_for_model(conn) AccessRight.objects.create( - codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=conn.pk) + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=conn.pk + ) return conn def create_params(**kwargs): res = { - "firstname": "John", "lastname": "Doe", "description": "four : 1", - "date": "28/10/2018", "token": "token", "email_notif": True + "firstname": "John", + "lastname": "Doe", + "description": "four : 1", + "date": "28/10/2018", + "token": "token", + "email_notif": True, } res.update(kwargs) return res @@ -31,14 +40,16 @@ def create_params(**kwargs): def mock_soap_call(monkeypatch, return_value): mock_soap_call = Mock(return_value=return_value) import passerelle.contrib.iws.models + monkeypatch.setattr(passerelle.contrib.iws.models.IWSConnector, '_soap_call', mock_soap_call) return mock_soap_call def test_checkdate_dechet_or_encombrant(app, setup, endpoint_dummy_cache): response = app.get( - '/iws/slug-iws/checkdate/3155570464130003/error/3/?city=toulouse' - '&session_id=7a896f464ede7b4e', expect_errors=True) + '/iws/slug-iws/checkdate/3155570464130003/error/3/?city=toulouse' '&session_id=7a896f464ede7b4e', + expect_errors=True, + ) json_result = response.json_body assert json_result['err'] == 1 assert u'DECHET' in json_result['err_desc'] @@ -47,16 +58,16 @@ def test_checkdate_dechet_or_encombrant(app, setup, endpoint_dummy_cache): def 
test_checkdate_sti_code_optionnal_last_char(app, setup, endpoint_dummy_cache, monkeypatch): mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': 'Aucune dates disponibles' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': 'Aucune dates disponibles'}, + }, + ) response = app.get( - '/iws/slug-iws/checkdate/3155570464130003B/DECHET/3/?city=toulouse' - '&session_id=7a896f464ede7b4e') + '/iws/slug-iws/checkdate/3155570464130003B/DECHET/3/?city=toulouse' '&session_id=7a896f464ede7b4e' + ) json_result = response.json_body assert json_result['err'] == 0 @@ -64,21 +75,20 @@ def test_checkdate_sti_code_optionnal_last_char(app, setup, endpoint_dummy_cache def test_checkdate_iws_error_status(app, setup, monkeypatch, endpoint_dummy_cache): mock_soap_call(monkeypatch, {'status': 'KO', 'trace': 'some trace'}) response = app.get( - '/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' - '&session_id=7a896f464ede7b4e', expect_errors=True) + '/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' '&session_id=7a896f464ede7b4e', + expect_errors=True, + ) json_result = response.json_body assert json_result['err'] == 1 assert json_result['err_desc'] == 'iws error, status: "KO", trace: "some trace"' def test_checkdate_iws_error_no_appel(app, setup, monkeypatch, endpoint_dummy_cache): - mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': {'NO_APPEL': ''}}) + mock_soap_call(monkeypatch, {'status': 'responseOk', 'trace': '', 'fields': {'NO_APPEL': ''}}) response = app.get( - '/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' - '&session_id=7a896f464ede7b4e', expect_errors=True) + '/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' '&session_id=7a896f464ede7b4e', + expect_errors=True, + ) json_result = response.json_body assert 
json_result['err'] == 1 assert json_result['err_desc'] == 'iws error, missing token' @@ -86,15 +96,16 @@ def test_checkdate_iws_error_no_appel(app, setup, monkeypatch, endpoint_dummy_ca def test_checkdate_iws_no_dates(app, setup, monkeypatch, endpoint_dummy_cache): mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': 'Aucune dates disponibles' - } - }) - response = app.get('/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' - '&session_id=7a896f464ede7b4e') + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': 'Aucune dates disponibles'}, + }, + ) + response = app.get( + '/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' '&session_id=7a896f464ede7b4e' + ) json_result = response.json_body assert json_result['err'] == 0 assert json_result['data'] == [] @@ -103,15 +114,16 @@ def test_checkdate_iws_no_dates(app, setup, monkeypatch, endpoint_dummy_cache): def test_checkdate_iws_has_dates(app, setup, monkeypatch, settings, endpoint_dummy_cache): settings.LANGUAGE_CODE = 'fr-fr' soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018' - } - }) - response = app.get('/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' - '&session_id=7a896f464ede7b4e') + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018'}, + }, + ) + response = app.get( + '/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' '&session_id=7a896f464ede7b4e' + ) json_result = response.json_body assert json_result['err'] == 0 dates = json_result['data'] @@ -125,15 +137,14 @@ def test_checkdate_iws_has_dates(app, setup, monkeypatch, settings, endpoint_dum def test_checkdate_dechet(app, setup, 
monkeypatch, endpoint_dummy_cache): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018' - } - }) - app.get('/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' - '&session_id=7a896f464ede7b4e') + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018'}, + }, + ) + app.get('/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' '&session_id=7a896f464ede7b4e') soap_args = soap_call.call_args[0][0] assert soap_args['C_EQUIPE'] == 'VPVIGIE' assert soap_args['I_AG_TYPEAGENDA'] == 'DECHETS VERTS' @@ -143,15 +154,17 @@ def test_checkdate_dechet(app, setup, monkeypatch, endpoint_dummy_cache): def test_checkdate_dechet_syndic(app, setup, monkeypatch, endpoint_dummy_cache): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018' - } - }) - app.get('/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' - '&session_id=7a896f464ede7b4e&syndic=true') + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018'}, + }, + ) + app.get( + '/iws/slug-iws/checkdate/3155570464130003/DECHET/3/?city=toulouse' + '&session_id=7a896f464ede7b4e&syndic=true' + ) soap_args = soap_call.call_args[0][0] assert soap_args['C_EQUIPE'] == 'VPVIGIE' assert soap_args['I_AG_TYPEAGENDA'] == 'DECHETS VERTS' @@ -161,15 +174,16 @@ def test_checkdate_dechet_syndic(app, setup, monkeypatch, endpoint_dummy_cache): def test_checkdate_encombrant(app, setup, monkeypatch, endpoint_dummy_cache): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018' - 
} - }) - app.get('/iws/slug-iws/checkdate/3155570464130003/ENCOMBRANT/3/?city=toulouse' - '&session_id=7a896f464ede7b4e') + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018'}, + }, + ) + app.get( + '/iws/slug-iws/checkdate/3155570464130003/ENCOMBRANT/3/?city=toulouse' '&session_id=7a896f464ede7b4e' + ) soap_args = soap_call.call_args[0][0] assert soap_args['C_EQUIPE'] == 'VPVIGIE' assert soap_args['I_AG_TYPEAGENDA'] == 'ENCOMBRANTS' @@ -177,8 +191,10 @@ def test_checkdate_encombrant(app, setup, monkeypatch, endpoint_dummy_cache): assert soap_args['C_TYPEPB'] == '8008' soap_call.reset() - app.get('/iws/slug-iws/checkdate/3155570464130003/ENCOMBRANT/3/?city=toulouse' - '&session_id=7a896f464ede7b4e&syndic=false') + app.get( + '/iws/slug-iws/checkdate/3155570464130003/ENCOMBRANT/3/?city=toulouse' + '&session_id=7a896f464ede7b4e&syndic=false' + ) soap_args = soap_call.call_args[0][0] assert soap_args['C_EQUIPE'] == 'VPVIGIE' assert soap_args['I_AG_TYPEAGENDA'] == 'ENCOMBRANTS' @@ -186,23 +202,28 @@ def test_checkdate_encombrant(app, setup, monkeypatch, endpoint_dummy_cache): assert soap_args['C_TYPEPB'] == '8008' # check invalid parameter - resp = app.get('/iws/slug-iws/checkdate/3155570464130003/ENCOMBRANT/3/?city=toulouse' - '&session_id=7a896f464ede7b4e&syndic=whatever', status=400) + resp = app.get( + '/iws/slug-iws/checkdate/3155570464130003/ENCOMBRANT/3/?city=toulouse' + '&session_id=7a896f464ede7b4e&syndic=whatever', + status=400, + ) assert resp.json.get('err') == 1 assert resp.json.get('err_desc') == 'invalid value for parameter "syndic"' def test_checkdate_encombrant_syndic(app, setup, monkeypatch, endpoint_dummy_cache): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018' - } - }) - 
app.get('/iws/slug-iws/checkdate/3155570464130003/ENCOMBRANT/3/?city=toulouse' - '&session_id=7a896f464ede7b4e&syndic=true') + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018; 19/06/2018'}, + }, + ) + app.get( + '/iws/slug-iws/checkdate/3155570464130003/ENCOMBRANT/3/?city=toulouse' + '&session_id=7a896f464ede7b4e&syndic=true' + ) soap_args = soap_call.call_args[0][0] assert soap_args['C_EQUIPE'] == 'VPVIGIE' assert soap_args['I_AG_TYPEAGENDA'] == 'ENCOMBRANTS' @@ -212,13 +233,13 @@ def test_checkdate_encombrant_syndic(app, setup, monkeypatch, endpoint_dummy_cac def test_bookdate(app, setup, monkeypatch): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) response = app.post_json('/iws/slug-iws/bookdate/', params=create_params()) soap_args = soap_call.call_args[0][0] assert soap_args['I_AP_SERVICE'] == 'OFFICE' @@ -232,13 +253,13 @@ def test_bookdate(app, setup, monkeypatch): def test_bookdate_no_mail(app, setup, monkeypatch): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) response = app.post_json('/iws/slug-iws/bookdate/', params=create_params()) soap_args = soap_call.call_args[0][0] assert not soap_args['I_AP_ADRESSEMAIL'] @@ -248,13 +269,13 @@ def test_bookdate_no_mail(app, setup, monkeypatch): def test_bookdate_mail(app, setup, monkeypatch): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 
'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) params = create_params() params['email'] = 'john.doe@localhost' response = app.post_json('/iws/slug-iws/bookdate/', params=params) @@ -266,13 +287,13 @@ def test_bookdate_mail(app, setup, monkeypatch): def test_bookdate_mail_notif(app, setup, monkeypatch): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) params = create_params() params['email_notif'] = True response = app.post_json('/iws/slug-iws/bookdate/', params=params) @@ -284,13 +305,13 @@ def test_bookdate_mail_notif(app, setup, monkeypatch): def test_bookdate_mail_no_notif(app, setup, monkeypatch): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) params = create_params() params['email_notif'] = False response = app.post_json('/iws/slug-iws/bookdate/', params=params) @@ -302,13 +323,13 @@ def test_bookdate_mail_no_notif(app, setup, monkeypatch): def test_bookdate_no_tel(app, setup, monkeypatch): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) response = 
app.post_json('/iws/slug-iws/bookdate/', params=create_params()) soap_args = soap_call.call_args[0][0] assert not soap_args['I_AP_TEL_DEMANDEU'] @@ -319,13 +340,13 @@ def test_bookdate_no_tel(app, setup, monkeypatch): def test_bookdate_tel_motif(app, setup, monkeypatch): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) params = create_params() params['tel_number'] = "0101010101" response = app.post_json('/iws/slug-iws/bookdate/', params=params) @@ -339,13 +360,13 @@ def test_bookdate_tel_motif(app, setup, monkeypatch): @pytest.mark.parametrize('sms', [True, 'trUe', 1, '1']) def test_bookdate_sms_true(app, setup, monkeypatch, sms): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) params = create_params() params['tel_number'] = "0101010101" params['sms'] = sms @@ -360,13 +381,13 @@ def test_bookdate_sms_true(app, setup, monkeypatch, sms): @pytest.mark.parametrize('sms', [False, 'faLse', 0, '0']) def test_bookdate_sms_false(app, setup, monkeypatch, sms): soap_call = mock_soap_call( - monkeypatch, { - 'status': 'responseOk', 'trace': '', - 'fields': { - 'NO_APPEL': 'sometoken', - 'I_APP_DATESPOSSIBLES': '18/06/2018;' - } - }) + monkeypatch, + { + 'status': 'responseOk', + 'trace': '', + 'fields': {'NO_APPEL': 'sometoken', 'I_APP_DATESPOSSIBLES': '18/06/2018;'}, + }, + ) params = create_params() params['tel_number'] = "0101010101" params['sms'] = sms diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 76796009..b0966aec 100644 
--- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -44,7 +44,8 @@ def test_jobs(mocked_get, app, base_adresse, freezer): assert Job.objects.get(id=job.id).status_details == {'error_summary': 'Exception: bano file is empty'} assert ResourceLog.objects.all().count() == 1 assert ResourceLog.objects.all()[0].message == ( - 'error running update_streets_data job (bano file is empty)') + 'error running update_streets_data job (bano file is empty)' + ) assert StreetModel.objects.count() == 3 with open(filepath, 'rb') as ban_file: @@ -109,7 +110,9 @@ def test_jobs(mocked_get, app, base_adresse, freezer): # use after_timestamp with add_job freezer.move_to('2019-01-01 00:00:00') - job = base_adresse.add_job('update_streets_data', after_timestamp=isodate.parse_datetime('2019-01-02T00:00:00+00:00')) + job = base_adresse.add_job( + 'update_streets_data', after_timestamp=isodate.parse_datetime('2019-01-02T00:00:00+00:00') + ) base_adresse.jobs() assert Job.objects.get(id=job.id).status == 'registered' freezer.move_to('2019-01-02 01:00:00') diff --git a/tests/test_jsondatastore.py b/tests/test_jsondatastore.py index c88ee2d3..2b82f4bf 100644 --- a/tests/test_jsondatastore.py +++ b/tests/test_jsondatastore.py @@ -14,17 +14,20 @@ def jsondatastore(db): datastore = JsonDataStore.objects.create(slug='foobar') api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(datastore) - AccessRight.objects.create(codename='can_access', apiuser=api, - resource_type=obj_type, resource_pk=datastore.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=datastore.pk + ) return datastore + @pytest.fixture def jsondatastore2(db): datastore = JsonDataStore.objects.create(slug='foobar2') api = ApiUser.objects.create(username='all2', keytype='', key='') obj_type = ContentType.objects.get_for_model(datastore) - AccessRight.objects.create(codename='can_access', apiuser=api, - 
resource_type=obj_type, resource_pk=datastore.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=datastore.pk + ) return datastore @@ -92,6 +95,7 @@ def test_jsondatastore(app, jsondatastore, jsondatastore2): resp = app.post_json('/jsondatastore/foobar2/data/%s/delete' % uuid, status=404) assert resp.json['err'] == 1 + def test_jsondatastore_name_id(app, jsondatastore): resp = app.get('/jsondatastore/foobar/data/') assert resp.json == {'data': [], 'err': 0} @@ -130,6 +134,7 @@ def test_jsondatastore_name_id(app, jsondatastore): resp = app.post_json('/jsondatastore/foobar/data/%s/delete?name_id=yyy' % uuid) assert resp.json['err'] == 0 + def test_jsondatastore_template(app, jsondatastore): jsondatastore.text_value_template = '{{foo}}' jsondatastore.save() @@ -147,6 +152,7 @@ def test_jsondatastore_template(app, jsondatastore): assert resp.json['data'][0]['text'] == 'aaa' assert resp.json['data'][1]['text'] == 'bar' + def test_jsondatastore_get_by_attribute(app, jsondatastore): resp = app.post_json('/jsondatastore/foobar/data/create', params={'foo': 'bar'}) uuid = resp.json['id'] @@ -176,6 +182,7 @@ def test_jsondatastore_get_by_attribute(app, jsondatastore): resp = app.get('/jsondatastore/foobar/data/by/foo/', params={'value': 'bar3', 'name_id': 'zzz'}) assert resp.json['err'] == 1 + def test_jsondatastore_datetimes(app, jsondatastore): encoder = DjangoJSONEncoder() resp = app.post_json('/jsondatastore/foobar/data/create', params={'foo': 'bar'}) @@ -197,30 +204,29 @@ def test_jsondatastore_datetimes(app, jsondatastore): def test_jsondatastore_list_by_attribute_filter(app, jsondatastore): - resp = app.post_json('/jsondatastore/foobar/data/create?name_id=name1', - params={'key1': 'val1', 'key2': 'foo'}) + resp = app.post_json( + '/jsondatastore/foobar/data/create?name_id=name1', params={'key1': 'val1', 'key2': 'foo'} + ) uuid1 = resp.json['id'] - resp = 
app.post_json('/jsondatastore/foobar/data/create?name_id=name2', - params={'key1': 'val1', 'key2': 'val2'}) + resp = app.post_json( + '/jsondatastore/foobar/data/create?name_id=name2', params={'key1': 'val1', 'key2': 'val2'} + ) uuid2 = resp.json['id'] - resp = app.post_json('/jsondatastore/foobar/data/create?name_id=name1', - params={'key1': 'val1', 'key2': 'val2'}) + resp = app.post_json( + '/jsondatastore/foobar/data/create?name_id=name1', params={'key1': 'val1', 'key2': 'val2'} + ) uuid3 = resp.json['id'] - resp = app.get('/jsondatastore/foobar/data/', - params={'key2': 'foo'}) + resp = app.get('/jsondatastore/foobar/data/', params={'key2': 'foo'}) assert [d['id'] for d in resp.json['data']] == [uuid1] - resp = app.get('/jsondatastore/foobar/data/', - params={'key1': 'val1', 'key2': 'val2'}) + resp = app.get('/jsondatastore/foobar/data/', params={'key1': 'val1', 'key2': 'val2'}) assert sorted([d['id'] for d in resp.json['data']]) == sorted([uuid2, uuid3]) - resp = app.get('/jsondatastore/foobar/data/?name_id=name1', - params={'key2': 'val2'}) + resp = app.get('/jsondatastore/foobar/data/?name_id=name1', params={'key2': 'val2'}) assert [d['id'] for d in resp.json['data']] == [uuid3] - resp = app.get('/jsondatastore/foobar/data/', - params={'key3': ''}) + resp = app.get('/jsondatastore/foobar/data/', params={'key3': ''}) assert [d['id'] for d in resp.json['data']] == [] @@ -233,10 +239,8 @@ def test_jsondatastore_list_by_q_attribute(app, jsondatastore): uuid2 = resp.json['id'] resp = app.post_json('/jsondatastore/foobar/data/create', params={'foo': 'ras'}) - resp = app.get('/jsondatastore/foobar/data/', - params={'q': 'àR'}) + resp = app.get('/jsondatastore/foobar/data/', params={'q': 'àR'}) assert sorted([d['id'] for d in resp.json['data']]) == sorted([uuid1, uuid2]) - resp = app.get('/jsondatastore/foobar/data/', - params={'q': 'na'}) + resp = app.get('/jsondatastore/foobar/data/', params={'q': 'na'}) assert resp.json['data'] == [] diff --git 
a/tests/test_jsonresponse.py b/tests/test_jsonresponse.py index c80c7a07..d0fad637 100644 --- a/tests/test_jsonresponse.py +++ b/tests/test_jsonresponse.py @@ -36,6 +36,7 @@ def log_as_warning_exception(req, *args, **kwargs): def http404_exception(req, *args, **kwargs): raise Http404 + @to_json() def doesnotexist_exception(req, *args, **kwargs): raise ObjectDoesNotExist @@ -61,11 +62,13 @@ def test_jsonresponselog_get(caplog): assert hasattr(record, 'body') assert "Error occurred while processing request" in record.message + def test_jsonresponselog_http404(caplog): request = RequestFactory() http404_exception(request.get('/')) assert caplog.records == [] + def test_jsonresponselog_doesnotexist(caplog): request = RequestFactory() doesnotexist_exception(request.get('/')) @@ -104,6 +107,7 @@ def test_jsonresponse_log_as_warning_exception(caplog): def test_jsonresponse_error_header(): request = RequestFactory() req = request.get('/') + @to_json() def test_func(req): return {"test": "un test"} @@ -112,10 +116,12 @@ def test_jsonresponse_error_header(): assert result.status_code == 200 data = json_loads(result.content) assert data == {'test': 'un test', 'err': 0} + @to_json() def test_func(req): class CustomException(Exception): http_status = 200 + raise CustomException result = test_func(req) @@ -126,17 +132,21 @@ def test_jsonresponse_error_header(): assert data['err_class'] == 'test_jsonresponse.CustomException' assert result.status_code == 200 + def test_jsonresponse_with_http4O4_exception(): request = RequestFactory() response = http404_exception(request.get('/')) assert response.status_code == 404 + def test_jsonresponse_with_callback(): request = RequestFactory() req = request.get('/?callback=myfunc') + @to_json() def test_func(req): return {"foo": "bar"} + result = test_func(req) content_type = result.get('Content-Type') assert 'application/javascript' in content_type @@ -144,11 +154,14 @@ def test_jsonresponse_with_callback(): args = 
json_loads(result.content[7:-2]) assert args == {'foo': 'bar', 'err': 0} + def test_jsonresponse_with_wrong_callback(): request = RequestFactory() req = request.get('/?callback=myfunc()') + @to_json() def test_func(req): return {"foo": "bar"} + result = test_func(req) assert result.status_code == 400 diff --git a/tests/test_lille_kimoce.py b/tests/test_lille_kimoce.py index 79e8efa4..0f7fafba 100644 --- a/tests/test_lille_kimoce.py +++ b/tests/test_lille_kimoce.py @@ -28,12 +28,14 @@ from django.urls import reverse from passerelle.contrib.lille_kimoce.models import Kimoce from passerelle.utils.jsonresponse import APIError + @pytest.fixture def setup(db): return utils.setup_access_rights( - Kimoce.objects.create(slug='test', - base_url='https://kimoce.mairie-lille.fr/api/', - username='test', password='secret')) + Kimoce.objects.create( + slug='test', base_url='https://kimoce.mairie-lille.fr/api/', username='test', password='secret' + ) + ) CATEGORIES_RESPONSE = """{ @@ -186,6 +188,7 @@ TOKEN_RESPONSE = """{"token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9"}""" TOKEN_ERROR_RESPONSE = """{"message": "Bad credentials"}""" + @mock.patch('passerelle.utils.Request.post') def test_get_token(mocked_post, app, setup): with pytest.raises(APIError): @@ -203,18 +206,14 @@ def test_get_token(mocked_post, app, setup): setup.get_token(True) assert mocked_post.call_count == 3 + @mock.patch('passerelle.utils.Request.post') @mock.patch('passerelle.utils.Request.get') def test_get_categories(mocked_get, mocked_post, app, setup): mocked_post.return_value = utils.FakedResponse(content=TOKEN_RESPONSE, status_code=200) mocked_get.return_value = utils.FakedResponse(content=CATEGORIES_RESPONSE, status_code=200) endpoint = reverse( - 'generic-endpoint', - kwargs={ - 'connector': 'lille-kimoce', - 'slug': setup.slug, - 'endpoint': 'categories' - } + 'generic-endpoint', kwargs={'connector': 'lille-kimoce', 'slug': setup.slug, 'endpoint': 'categories'} ) response = app.get(endpoint) assert 'data' 
in response.json @@ -232,11 +231,8 @@ def test_get_categories(mocked_get, mocked_post, app, setup): def test_get_types(mocked_get, mocked_post, app, setup): mocked_post.return_value = utils.FakedResponse(content=TOKEN_RESPONSE, status_code=200) mocked_get.return_value = utils.FakedResponse(content=TYPES_RESPONSE, status_code=200) - endpoint = reverse('generic-endpoint', - kwargs={'connector': 'lille-kimoce', - 'slug': setup.slug, - 'endpoint': 'types' - } + endpoint = reverse( + 'generic-endpoint', kwargs={'connector': 'lille-kimoce', 'slug': setup.slug, 'endpoint': 'types'} ) response = app.get(endpoint, status=400) assert response.json['err'] @@ -257,11 +253,8 @@ def test_get_types(mocked_get, mocked_post, app, setup): def test_get_sub_types(mocked_get, mocked_post, app, setup): mocked_post.return_value = utils.FakedResponse(content=TOKEN_RESPONSE, status_code=200) mocked_get.return_value = utils.FakedResponse(content=SUB_TYPES_RESPONSE, status_code=200) - endpoint = reverse('generic-endpoint', - kwargs={'connector': 'lille-kimoce', - 'slug': setup.slug, - 'endpoint': 'subtypes' - } + endpoint = reverse( + 'generic-endpoint', kwargs={'connector': 'lille-kimoce', 'slug': setup.slug, 'endpoint': 'subtypes'} ) response = app.get(endpoint, status=400) assert response.json['err'] @@ -282,11 +275,8 @@ def test_get_sub_types(mocked_get, mocked_post, app, setup): def test_get_streets(mocked_get, mocked_post, app, setup): mocked_post.return_value = utils.FakedResponse(content=TOKEN_RESPONSE, status_code=200) mocked_get.return_value = utils.FakedResponse(content=STREETS_RESPONSE, status_code=200) - endpoint = reverse('generic-endpoint', - kwargs={'connector': 'lille-kimoce', - 'slug': setup.slug, - 'endpoint': 'streets' - } + endpoint = reverse( + 'generic-endpoint', kwargs={'connector': 'lille-kimoce', 'slug': setup.slug, 'endpoint': 'streets'} ) response = app.get(endpoint) assert 'data' in response.json @@ -308,7 +298,8 @@ def test_get_streets(mocked_get, mocked_post, 
app, setup): def test_create_demand(mocked_post, app, setup): mocked_post.side_effect = [ utils.FakedResponse(content=TOKEN_RESPONSE, status_code=200), - utils.FakedResponse(content=DEMAND_CREATION_RESPONSE, status_code=200)] + utils.FakedResponse(content=DEMAND_CREATION_RESPONSE, status_code=200), + ] data = { 'category': '/api/categories/80', 'type': '/api/types/1825', @@ -318,26 +309,21 @@ def test_create_demand(mocked_post, app, setup): 'last_name': 'Bar', 'email': 'foo@example.net', } - endpoint = reverse('generic-endpoint', - kwargs={'connector': 'lille-kimoce', - 'slug': setup.slug, - 'endpoint': 'create_demand' - } + endpoint = reverse( + 'generic-endpoint', + kwargs={'connector': 'lille-kimoce', 'slug': setup.slug, 'endpoint': 'create_demand'}, ) response = app.post_json(endpoint, params=data) assert mocked_post.call_args[0][0] == 'https://kimoce.mairie-lille.fr/api/demands' - assert mocked_post.call_args[1]['json'] == {'GRUResponseLink': 'http://example.com/form/1/', - 'priorityId': 3, 'type': '/api/types/1825', - 'category': '/api/categories/80', - 'companyLocation': {'number': '', - 'road': '', - 'zipCode': '', - 'city': ''}, - 'pictures': [], - 'subType': '/api/sub_types/164', - 'sourceContact': {'firstname': 'Foo', - 'mail': 'foo@example.net', - 'lastname': u'Bar'} + assert mocked_post.call_args[1]['json'] == { + 'GRUResponseLink': 'http://example.com/form/1/', + 'priorityId': 3, + 'type': '/api/types/1825', + 'category': '/api/categories/80', + 'companyLocation': {'number': '', 'road': '', 'zipCode': '', 'city': ''}, + 'pictures': [], + 'subType': '/api/sub_types/164', + 'sourceContact': {'firstname': 'Foo', 'mail': 'foo@example.net', 'lastname': u'Bar'}, } assert response.json['data'] == json.loads(DEMAND_CREATION_RESPONSE) assert mocked_post.call_count == 2 diff --git a/tests/test_lille_urban_card.py b/tests/test_lille_urban_card.py index 89064bb4..3c1d6e2a 100644 --- a/tests/test_lille_urban_card.py +++ b/tests/test_lille_urban_card.py @@ -11,14 
+11,14 @@ from passerelle.utils.jsonresponse import APIError import utils + @pytest.fixture def connector(db): return utils.setup_access_rights( LilleUrbanCard.objects.create( - slug='test', - base_url='http://localhost', - username='test', - password='secret')) + slug='test', base_url='http://localhost', username='test', password='secret' + ) + ) TOKEN_ERROR_RESPONSE = '{"erreur":"Authentification échouée"}' @@ -82,11 +82,10 @@ def mocked_http(url, request): error = {'erreur': 'Pas de carte attribuee correspondante'} return {'content': json.dumps(error), 'status_code': 404} assert url.query == 'numero_serie=XXX' - return {'content': json.dumps({ - 'numero_serie': 'XXX', - 'date_debut_abonnement': 'xx/xx/xxxx'}), - 'status_code': 200} - + return { + 'content': json.dumps({'numero_serie': 'XXX', 'date_debut_abonnement': 'xx/xx/xxxx'}), + 'status_code': 200, + } @mock.patch('passerelle.utils.Request.post') @@ -107,16 +106,19 @@ def test_csp(app, connector): def test_card_request(app, connector): endpoint = utils.generic_endpoint_url('lille-urban-card', 'card_request', slug=connector.slug) with HTTMock(mocked_http): - resp = app.post_json(endpoint, params={ - 'civilite': 'Monsieur', - 'code_postal': '59000', - 'ville': 'Lille', - 'photo': {'content': 'xxx'}, - 'telephone': '01.02.03.04.05', - 'recevoir_journal_senior': 'Non', - 'service_zoo': 'Oui', - 'service_zoo_newsletter': 'Non', - }) + resp = app.post_json( + endpoint, + params={ + 'civilite': 'Monsieur', + 'code_postal': '59000', + 'ville': 'Lille', + 'photo': {'content': 'xxx'}, + 'telephone': '01.02.03.04.05', + 'recevoir_journal_senior': 'Non', + 'service_zoo': 'Oui', + 'service_zoo_newsletter': 'Non', + }, + ) request = resp.json['data']['request'] assert request['civilite'] == 1 assert request['code_postal'] == 59000 @@ -127,30 +129,38 @@ def test_card_request(app, connector): assert resp.json['data']['n_demande_clu'] == 10000005 # error handling - resp = app.post_json(endpoint, params={ - 'simulate_error': 
'doublon', - 'civilite': 'Monsieur', - 'code_postal': '59000', - 'ville': 'Lille', - 'photo': {'content': 'xxx'}, - 'telephone': '01.02.03.04.05', - 'recevoir_journal_senior': 'Non', - 'service_zoo': 'Oui', - 'service_zoo_newsletter': 'Non', - }, status=200) + resp = app.post_json( + endpoint, + params={ + 'simulate_error': 'doublon', + 'civilite': 'Monsieur', + 'code_postal': '59000', + 'ville': 'Lille', + 'photo': {'content': 'xxx'}, + 'telephone': '01.02.03.04.05', + 'recevoir_journal_senior': 'Non', + 'service_zoo': 'Oui', + 'service_zoo_newsletter': 'Non', + }, + status=200, + ) assert resp.json['data']['status_code'] == 409 - resp = app.post_json(endpoint, params={ - 'simulate_error': 'num_serie', - 'civilite': 'Monsieur', - 'code_postal': '59000', - 'ville': 'Lille', - 'photo': {'content': 'xxx'}, - 'telephone': '01.02.03.04.05', - 'recevoir_journal_senior': 'Non', - 'service_zoo': 'Oui', - 'service_zoo_newsletter': 'Non', - }, status=200) + resp = app.post_json( + endpoint, + params={ + 'simulate_error': 'num_serie', + 'civilite': 'Monsieur', + 'code_postal': '59000', + 'ville': 'Lille', + 'photo': {'content': 'xxx'}, + 'telephone': '01.02.03.04.05', + 'recevoir_journal_senior': 'Non', + 'service_zoo': 'Oui', + 'service_zoo_newsletter': 'Non', + }, + status=200, + ) assert resp.json['data']['status_code'] == 404 assert resp.json['data']['statut'] == 'ERREUR_NUM_SERIE' @@ -165,18 +175,21 @@ def test_card_status(app, connector): def test_add_subscriptions(app, connector): endpoint = utils.generic_endpoint_url('lille-urban-card', 'add_subscriptions', slug=connector.slug) with HTTMock(mocked_http): - resp = app.post_json(endpoint, params={ - 'n_demande_gru': 'XXX', - 'numero_serie': 'XXX', - 'civilite': 'Monsieur', - 'code_postal': '59000', - 'ville': 'Lille', - 'photo': {'content': 'xxx'}, - 'telephone': '01.02.03.04.05', - 'recevoir_journal_senior': 'Non', - 'service_zoo': 'Oui', - 'service_zoo_newsletter': 'Non', - }) + resp = app.post_json( + endpoint, + 
params={ + 'n_demande_gru': 'XXX', + 'numero_serie': 'XXX', + 'civilite': 'Monsieur', + 'code_postal': '59000', + 'ville': 'Lille', + 'photo': {'content': 'xxx'}, + 'telephone': '01.02.03.04.05', + 'recevoir_journal_senior': 'Non', + 'service_zoo': 'Oui', + 'service_zoo_newsletter': 'Non', + }, + ) request = resp.json['data']['request'] assert request['civilite'] == 1 assert request['code_postal'] == 59000 @@ -187,41 +200,52 @@ def test_add_subscriptions(app, connector): assert resp.json['data']['n_demande_clu'] == 10000005 # error handling - resp = app.post_json(endpoint, params={ - 'simulate_error': 'data', - 'n_demande_gru': 'XXX', - 'numero_serie': 'XXX', - 'civilite': 'Monsieur', - 'code_postal': '59000', - 'ville': 'Lille', - 'photo': {'content': 'xxx'}, - 'telephone': '01.02.03.04.05', - 'recevoir_journal_senior': 'Non', - 'service_zoo': 'Oui', - 'service_zoo_newsletter': 'Non', - }, status=200) + resp = app.post_json( + endpoint, + params={ + 'simulate_error': 'data', + 'n_demande_gru': 'XXX', + 'numero_serie': 'XXX', + 'civilite': 'Monsieur', + 'code_postal': '59000', + 'ville': 'Lille', + 'photo': {'content': 'xxx'}, + 'telephone': '01.02.03.04.05', + 'recevoir_journal_senior': 'Non', + 'service_zoo': 'Oui', + 'service_zoo_newsletter': 'Non', + }, + status=200, + ) assert resp.json['data']['status_code'] == 400 def test_code_change(app, connector): endpoint = utils.generic_endpoint_url('lille-urban-card', 'code_change', slug=connector.slug) with HTTMock(mocked_http): - resp = app.post_json(endpoint, params={ - 'numero_serie': 'XXX', - 'ancien': '1234', - 'nouveau': '2345', - 'confirmation_nouveau': '2345', - }) + resp = app.post_json( + endpoint, + params={ + 'numero_serie': 'XXX', + 'ancien': '1234', + 'nouveau': '2345', + 'confirmation_nouveau': '2345', + }, + ) assert resp.json['err'] == 0 # error handling - resp = app.post_json(endpoint, params={ - 'simulate_error': 'wrong num serie', - 'numero_serie': 'XXX', - 'ancien': '1234', - 'nouveau': '2345', - 
'confirmation_nouveau': '2345', - }, status=200) + resp = app.post_json( + endpoint, + params={ + 'simulate_error': 'wrong num serie', + 'numero_serie': 'XXX', + 'ancien': '1234', + 'nouveau': '2345', + 'confirmation_nouveau': '2345', + }, + status=200, + ) assert resp.json['err'] == 1 assert resp.json['data']['status_code'] == 404 @@ -229,53 +253,74 @@ def test_code_change(app, connector): def test_code_check(app, connector): endpoint = utils.generic_endpoint_url('lille-urban-card', 'code_check', slug=connector.slug) with HTTMock(mocked_http): - resp = app.post_json(endpoint, params={ - 'numero_serie': 'XXX', - 'password': '1234', - }) + resp = app.post_json( + endpoint, + params={ + 'numero_serie': 'XXX', + 'password': '1234', + }, + ) assert resp.json['err'] == 0 # error handling - resp = app.post_json(endpoint, params={ - 'simulate_error': 'wrong num serie', - 'numero_serie': 'XXX', - 'password': '1234', - }, status=200) + resp = app.post_json( + endpoint, + params={ + 'simulate_error': 'wrong num serie', + 'numero_serie': 'XXX', + 'password': '1234', + }, + status=200, + ) assert resp.json['err'] == 1 def test_card_info(app, connector): endpoint = utils.generic_endpoint_url('lille-urban-card', 'card_info', slug=connector.slug) with HTTMock(mocked_http): - resp = app.get(endpoint, params={ - 'numero_serie': 'XXX', - }) + resp = app.get( + endpoint, + params={ + 'numero_serie': 'XXX', + }, + ) assert resp.json['err'] == 0 # error handling - resp = app.get(endpoint, params={ - 'numero_serie': 'ERROR', - }, status=200) + resp = app.get( + endpoint, + params={ + 'numero_serie': 'ERROR', + }, + status=200, + ) assert resp.json['err'] == 1 def test_card_revocation(app, connector): endpoint = utils.generic_endpoint_url('lille-urban-card', 'card_revocation', slug=connector.slug) with HTTMock(mocked_http): - resp = app.post_json(endpoint, params={ - 'n_demande_gru': '12-123', - 'numero_serie': 'XXX', - 'date_demande': '05/02/2020', - }) + resp = app.post_json( + 
endpoint, + params={ + 'n_demande_gru': '12-123', + 'numero_serie': 'XXX', + 'date_demande': '05/02/2020', + }, + ) assert resp.json['err'] == 0 # error handling - resp = app.post_json(endpoint, params={ - 'simulate_error': 'doublon', - 'n_demande_gru': '12-123', - 'numero_serie': 'XXX', - 'date_demande': '05/02/2020', - }, status=200) + resp = app.post_json( + endpoint, + params={ + 'simulate_error': 'doublon', + 'n_demande_gru': '12-123', + 'numero_serie': 'XXX', + 'date_demande': '05/02/2020', + }, + status=200, + ) assert resp.json['err'] == 1 assert resp.json['data']['status_code'] == 409 @@ -283,23 +328,30 @@ def test_card_revocation(app, connector): def test_subscription_revocation(app, connector): endpoint = utils.generic_endpoint_url('lille-urban-card', 'subscription_revocation', slug=connector.slug) with HTTMock(mocked_http): - resp = app.post_json(endpoint, params={ - 'n_demande_gru': '12-123', - 'numero_serie': 'XXX', - 'date_demande': '05/02/2020', - 'service_zoo': 'Oui', - 'service_zoo_newsletter': 'Non', - }) + resp = app.post_json( + endpoint, + params={ + 'n_demande_gru': '12-123', + 'numero_serie': 'XXX', + 'date_demande': '05/02/2020', + 'service_zoo': 'Oui', + 'service_zoo_newsletter': 'Non', + }, + ) assert resp.json['err'] == 0 # error handling - resp = app.post_json(endpoint, params={ - 'simulate_error': 'doublon', - 'n_demande_gru': '12-123', - 'numero_serie': 'XXX', - 'date_demande': '05/02/2020', - 'service_zoo': 'Oui', - 'service_zoo_newsletter': 'Non', - }, status=200) + resp = app.post_json( + endpoint, + params={ + 'simulate_error': 'doublon', + 'n_demande_gru': '12-123', + 'numero_serie': 'XXX', + 'date_demande': '05/02/2020', + 'service_zoo': 'Oui', + 'service_zoo_newsletter': 'Non', + }, + status=200, + ) assert resp.json['err'] == 1 assert resp.json['data']['status_code'] == 409 diff --git a/tests/test_maelis.py b/tests/test_maelis.py index 7d7eb835..e1f9d4e5 100644 --- a/tests/test_maelis.py +++ b/tests/test_maelis.py @@ -10,7 
+10,11 @@ from django.utils.dateparse import parse_date from passerelle.apps.maelis.models import Maelis, Link from passerelle.apps.maelis.utils import ( - get_school_year, week_boundaries_datetimes, month_range, decompose_event) + get_school_year, + week_boundaries_datetimes, + month_range, + decompose_event, +) from passerelle.utils.jsonresponse import APIError @@ -49,45 +53,52 @@ def activity_service_wsdl(): @pytest.fixture def catalog_mocked_get(activity_service_wsdl, family_service_wsdl): return ( - utils.FakedResponse(content=family_service_wsdl, - status_code=200, - headers={'Content-Type': 'text/xml'}), - utils.FakedResponse(content=activity_service_wsdl, - status_code=200, - headers={'Content-Type': 'text/xml'}), + utils.FakedResponse( + content=family_service_wsdl, status_code=200, headers={'Content-Type': 'text/xml'} + ), + utils.FakedResponse( + content=activity_service_wsdl, status_code=200, headers={'Content-Type': 'text/xml'} + ), ) @pytest.fixture def catalog_mocked_post(): return ( - utils.FakedResponse(content=get_xml_file('readFamily.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}), - utils.FakedResponse(content=get_xml_file('readActivityListResponse.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}), + utils.FakedResponse( + content=get_xml_file('readFamily.xml'), status_code=200, headers={'Content-Type': 'text/xml'} + ), + utils.FakedResponse( + content=get_xml_file('readActivityListResponse.xml'), + status_code=200, + headers={'Content-Type': 'text/xml'}, + ), ) @pytest.fixture def connector(db): - return utils.setup_access_rights(Maelis.objects.create( - slug='test', login='foo', password='bar', - base_url='http://www3.sigec.fr/entrouvertws/services/')) + return utils.setup_access_rights( + Maelis.objects.create( + slug='test', login='foo', password='bar', base_url='http://www3.sigec.fr/entrouvertws/services/' + ) + ) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def 
test_link_unlink(mocked_post, mocked_get, family_service_wsdl, - connector, app): +def test_link_unlink(mocked_post, mocked_get, family_service_wsdl, connector, app): mocked_get.return_value = mock.Mock(content=family_service_wsdl) - mocked_post.return_value = mock.Mock(content=get_xml_file('readFamilyByPasswordError.xml'), - status_code=500, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=get_xml_file('readFamilyByPasswordError.xml'), + status_code=500, + headers={'Content-Type': 'text/xml'}, + ) assert Link.objects.count() == 0 - resp = app.post_json('/maelis/test/link?NameID=local', - params={'family_id': '3264', 'password': 'wrong', 'school_year': '2020'}) + resp = app.post_json( + '/maelis/test/link?NameID=local', + params={'family_id': '3264', 'password': 'wrong', 'school_year': '2020'}, + ) assert resp.json['err'] == 1 assert resp.json['err_desc'] == 'E204 : Le mot de passe est incorrect' assert not resp.json['data'] @@ -97,11 +108,12 @@ def test_link_unlink(mocked_post, mocked_get, family_service_wsdl, assert resp.json['err'] assert resp.json['err_desc'] == 'User not linked to family' - mocked_post.return_value = mock.Mock(content=get_xml_file('readFamilyByPasswordResult.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}) - resp = app.post_json('/maelis/test/link?NameID=local', - params={'family_id': '3264', 'password': '123456'}) + mocked_post.return_value = mock.Mock( + content=get_xml_file('readFamilyByPasswordResult.xml'), + status_code=200, + headers={'Content-Type': 'text/xml'}, + ) + resp = app.post_json('/maelis/test/link?NameID=local', params={'family_id': '3264', 'password': '123456'}) assert resp.json['err'] == 0 assert resp.json['data'] assert resp.json['data']['number'] == 3264 @@ -117,12 +129,11 @@ def test_link_unlink(mocked_post, mocked_get, family_service_wsdl, @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_family_info(mocked_post, 
mocked_get, family_service_wsdl, - connector, app): +def test_family_info(mocked_post, mocked_get, family_service_wsdl, connector, app): mocked_get.return_value = mock.Mock(content=family_service_wsdl) - mocked_post.return_value = mock.Mock(content=get_xml_file('readFamily.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}) + mocked_post.return_value = mock.Mock( + content=get_xml_file('readFamily.xml'), status_code=200, headers={'Content-Type': 'text/xml'} + ) assert Link.objects.count() == 0 Link.objects.create(resource=connector, family_id='3264', name_id='local') resp = app.get('/maelis/test/family-info?NameID=local') @@ -150,8 +161,7 @@ def test_family_info(mocked_post, mocked_get, family_service_wsdl, @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_activity_list(mocked_post, mocked_get, - catalog_mocked_get, catalog_mocked_post, connector, app): +def test_activity_list(mocked_post, mocked_get, catalog_mocked_get, catalog_mocked_post, connector, app): mocked_get.side_effect = catalog_mocked_get mocked_post.side_effect = catalog_mocked_post Link.objects.create(resource=connector, family_id='3264', name_id='local') @@ -164,39 +174,42 @@ def test_activity_list(mocked_post, mocked_get, @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_update_coordinates(mocked_post, mocked_get, family_service_wsdl, - connector, app): +def test_update_coordinates(mocked_post, mocked_get, family_service_wsdl, connector, app): mocked_get.return_value = mock.Mock(content=family_service_wsdl) mocked_post.side_effect = ( - utils.FakedResponse(content=get_xml_file('updateCoordinatesResponse.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}), - utils.FakedResponse(content=get_xml_file('updateCoordinatesError.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}) + utils.FakedResponse( + content=get_xml_file('updateCoordinatesResponse.xml'), + 
status_code=200, + headers={'Content-Type': 'text/xml'}, + ), + utils.FakedResponse( + content=get_xml_file('updateCoordinatesError.xml'), + status_code=200, + headers={'Content-Type': 'text/xml'}, + ), ) Link.objects.create(resource=connector, family_id='3264', name_id='local') - resp = app.post_json('/maelis/test/update-coordinates?NameID=local&personID=21293', - params={'mail': 'foo@example.com'} - ) + resp = app.post_json( + '/maelis/test/update-coordinates?NameID=local&personID=21293', params={'mail': 'foo@example.com'} + ) assert resp.content is not None - resp = app.post_json('/maelis/test/update-coordinates?NameID=local&personID=21293', - params={'town': 'Paris', 'num': '169'} - ) + resp = app.post_json( + '/maelis/test/update-coordinates?NameID=local&personID=21293', params={'town': 'Paris', 'num': '169'} + ) assert resp.json['err'] assert resp.json['err_desc'] == 'E16 : Le code postal est obligatoire' @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_list_invoices(mocked_post, mocked_get, invoice_service_wsdl, - connector, app): +def test_list_invoices(mocked_post, mocked_get, invoice_service_wsdl, connector, app): mocked_get.return_value = mock.Mock(content=invoice_service_wsdl) - mocked_post.return_value = mock.Mock(content=get_xml_file('readInvoicesResponse.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'} - ) + mocked_post.return_value = mock.Mock( + content=get_xml_file('readInvoicesResponse.xml'), + status_code=200, + headers={'Content-Type': 'text/xml'}, + ) Link.objects.create(resource=connector, family_id='3264', name_id='local') resp = app.get('/maelis/test/regie/1/invoices?NameID=local') assert resp.json['data'] @@ -209,13 +222,13 @@ def test_list_invoices(mocked_post, mocked_get, invoice_service_wsdl, @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_get_invoice_details(mocked_post, mocked_get, invoice_service_wsdl, - connector, 
app): +def test_get_invoice_details(mocked_post, mocked_get, invoice_service_wsdl, connector, app): mocked_get.return_value = mock.Mock(content=invoice_service_wsdl) - mocked_post.return_value = mock.Mock(content=get_xml_file('readInvoicesResponse.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'} - ) + mocked_post.return_value = mock.Mock( + content=get_xml_file('readInvoicesResponse.xml'), + status_code=200, + headers={'Content-Type': 'text/xml'}, + ) Link.objects.create(resource=connector, family_id='3264', name_id='local') resp = app.get('/maelis/test/regie/1/invoice/3264-53186?NameID=local') assert resp.json['data'] @@ -223,34 +236,43 @@ def test_get_invoice_details(mocked_post, mocked_get, invoice_service_wsdl, assert resp.json['data']['display_id'] == '53186' -@pytest.mark.parametrize('date, schoolyear', [ - ('2020-07-30', 2019), - ('2020-07-31', 2020), -]) +@pytest.mark.parametrize( + 'date, schoolyear', + [ + ('2020-07-30', 2019), + ('2020-07-31', 2020), + ], +) def test_get_school_year(date, schoolyear): date = parse_date(date) assert schoolyear == get_school_year(date) -@pytest.mark.parametrize('date, monday, sunday', [ - ('2020-12-27', '2020-12-21', '2020-12-27'), - ('2020-12-28', '2020-12-28', '2021-01-03'), - ('2020-12-31', '2020-12-28', '2021-01-03'), - ('2021-01-01', '2020-12-28', '2021-01-03'), - ('2021-01-03', '2020-12-28', '2021-01-03'), -]) +@pytest.mark.parametrize( + 'date, monday, sunday', + [ + ('2020-12-27', '2020-12-21', '2020-12-27'), + ('2020-12-28', '2020-12-28', '2021-01-03'), + ('2020-12-31', '2020-12-28', '2021-01-03'), + ('2021-01-01', '2020-12-28', '2021-01-03'), + ('2021-01-03', '2020-12-28', '2021-01-03'), + ], +) def test_week_boundaries(date, monday, sunday): start, end = week_boundaries_datetimes(date) assert start.strftime('%Y-%m-%d') == monday assert end.strftime('%Y-%m-%d') == sunday -@pytest.mark.parametrize('start, end, items', [ - ('2020-10-22', '2020-12-06', ['2020-10-01', '2020-11-01', '2020-12-01']), - 
('2020-12-31', '2021-01-03', ['2020-12-01', '2021-01-01']), - ('2020-12-28', '2020-12-28', ['2020-12-01']), - ('2021-01-03', '2020-12-01', []), -]) +@pytest.mark.parametrize( + 'start, end, items', + [ + ('2020-10-22', '2020-12-06', ['2020-10-01', '2020-11-01', '2020-12-01']), + ('2020-12-31', '2021-01-03', ['2020-12-01', '2021-01-01']), + ('2020-12-28', '2020-12-28', ['2020-12-01']), + ('2021-01-03', '2020-12-01', []), + ], +) def test_month_range(start, end, items): start = parse_date(start) end = parse_date(end) @@ -268,14 +290,10 @@ def test_decompose_event(): assert not [x for x in data if x['text'] == 'REPAS'] # unit is break down into its components - assert [x['text'] for x in decompose_event(data[22])] == [ - 'Matinée', 'Repas', 'Après-midi'] - assert [x['text'] for x in decompose_event(data[23])] == [ - 'Matinée'] - assert [x['text'] for x in decompose_event(data[24])] == [ - 'Matinée', 'Repas'] - assert [x['text'] for x in decompose_event(data[25])] == [ - 'Après-midi'] + assert [x['text'] for x in decompose_event(data[22])] == ['Matinée', 'Repas', 'Après-midi'] + assert [x['text'] for x in decompose_event(data[23])] == ['Matinée'] + assert [x['text'] for x in decompose_event(data[24])] == ['Matinée', 'Repas'] + assert [x['text'] for x in decompose_event(data[25])] == ['Après-midi'] # child_planning function use a dict to remove dupplicated components data = {x['id']: x for e in data for x in decompose_event(e)}.values() @@ -285,41 +303,52 @@ def test_decompose_event(): @override_settings(TIME_ZONE='Europe/Paris') -@pytest.mark.parametrize('legacy, nb_events, nb_booked, response', [ - ('please', 43, 9, 'child_planning_before_decomposition'), - ('', 42, 9, 'child_planning_after_decomposition'), -]) +@pytest.mark.parametrize( + 'legacy, nb_events, nb_booked, response', + [ + ('please', 43, 9, 'child_planning_before_decomposition'), + ('', 42, 9, 'child_planning_after_decomposition'), + ], +) @mock.patch('passerelle.utils.Request.get') 
@mock.patch('passerelle.utils.Request.post') -def test_get_child_planning(mocked_post, mocked_get, legacy, nb_events, nb_booked, response, - family_service_wsdl, activity_service_wsdl, app, connector): +def test_get_child_planning( + mocked_post, + mocked_get, + legacy, + nb_events, + nb_booked, + response, + family_service_wsdl, + activity_service_wsdl, + app, + connector, +): mocked_get.side_effect = ( utils.FakedResponse( - content=family_service_wsdl, - status_code=200, - headers={'Content-Type': 'text/xml'}), + content=family_service_wsdl, status_code=200, headers={'Content-Type': 'text/xml'} + ), utils.FakedResponse( - content=activity_service_wsdl, - status_code=200, - headers={'Content-Type': 'text/xml'}), + content=activity_service_wsdl, status_code=200, headers={'Content-Type': 'text/xml'} + ), utils.FakedResponse( - content=activity_service_wsdl, - status_code=200, - headers={'Content-Type': 'text/xml'}) + content=activity_service_wsdl, status_code=200, headers={'Content-Type': 'text/xml'} + ), ) mocked_post.side_effect = ( utils.FakedResponse( - content=get_xml_file('readFamily.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}), + content=get_xml_file('readFamily.xml'), status_code=200, headers={'Content-Type': 'text/xml'} + ), utils.FakedResponse( content=get_xml_file('child_planning_readActivityListResponse.xml'), status_code=200, - headers={'Content-Type': 'text/xml'}), + headers={'Content-Type': 'text/xml'}, + ), utils.FakedResponse( content=get_xml_file('child_planning_readChildMonthPlanningResponse.xml'), status_code=200, - headers={'Content-Type': 'text/xml'}) + headers={'Content-Type': 'text/xml'}, + ), ) Link.objects.create(resource=connector, family_id='3264', name_id='local') url = '/maelis/test/child-planning?NameID=local&childID=21293&start_date=2020-12-19' @@ -338,16 +367,28 @@ def test_get_child_planning(mocked_post, mocked_get, legacy, nb_events, nb_booke assert resp.json == get_json_file(response) 
-@pytest.mark.parametrize('parameters, nb_subscribed, nb_not_subscribed', [ - ('&subscribingStatus=', 2, 18), - ('&subscribingStatus=subscribed', 2, 0), - ('&subscribingStatus=not-subscribed', 0, 17), - ('&subscribePublication=ELN', 2, 48), -]) +@pytest.mark.parametrize( + 'parameters, nb_subscribed, nb_not_subscribed', + [ + ('&subscribingStatus=', 2, 18), + ('&subscribingStatus=subscribed', 2, 0), + ('&subscribingStatus=not-subscribed', 0, 17), + ('&subscribePublication=ELN', 2, 48), + ], +) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_child_activities(mocked_post, mocked_get, parameters, nb_subscribed, nb_not_subscribed, - catalog_mocked_get, catalog_mocked_post, connector, app): +def test_child_activities( + mocked_post, + mocked_get, + parameters, + nb_subscribed, + nb_not_subscribed, + catalog_mocked_get, + catalog_mocked_post, + connector, + app, +): mocked_get.side_effect = catalog_mocked_get mocked_post.side_effect = catalog_mocked_post Link.objects.create(resource=connector, family_id='3264', name_id='local') @@ -361,16 +402,20 @@ def test_child_activities(mocked_post, mocked_get, parameters, nb_subscribed, nb assert len([x for x in status if x == 'not-subscribed']) == nb_not_subscribed -@pytest.mark.parametrize('parameters, err_desc', [ - ('&childID=99999', 'Child not found'), - ('&subscribingStatus=not-a-status', 'wrong value for subscribingStatus'), - ("&queryDate=2020-02-31", 'not a valid date'), - ("&queryDate=not-a-date", 'YYYY-MM-DD expected'), -]) +@pytest.mark.parametrize( + 'parameters, err_desc', + [ + ('&childID=99999', 'Child not found'), + ('&subscribingStatus=not-a-status', 'wrong value for subscribingStatus'), + ("&queryDate=2020-02-31", 'not a valid date'), + ("&queryDate=not-a-date", 'YYYY-MM-DD expected'), + ], +) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_child_activities_errors(mocked_post, mocked_get, parameters, err_desc, - 
catalog_mocked_get, catalog_mocked_post, connector, app): +def test_child_activities_errors( + mocked_post, mocked_get, parameters, err_desc, catalog_mocked_get, catalog_mocked_post, connector, app +): mocked_get.side_effect = catalog_mocked_get mocked_post.side_effect = catalog_mocked_post Link.objects.create(resource=connector, family_id='3264', name_id='local') @@ -383,62 +428,62 @@ def test_child_activities_errors(mocked_post, mocked_get, parameters, err_desc, @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_subscribe(mocked_post, mocked_get, family_service_wsdl, - connector, app): +def test_subscribe(mocked_post, mocked_get, family_service_wsdl, connector, app): mocked_get.return_value = mock.Mock(content=family_service_wsdl) mocked_post.side_effect = ( utils.FakedResponse( - content=get_xml_file('readFamily.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}), + content=get_xml_file('readFamily.xml'), status_code=200, headers={'Content-Type': 'text/xml'} + ), utils.FakedResponse( content=get_xml_file('subscribeActivityResult.xml'), status_code=200, - headers={'Content-Type': 'text/xml'}) + headers={'Content-Type': 'text/xml'}, + ), ) Link.objects.create(resource=connector, family_id='3264', name_id='local') - resp = app.get('/maelis/test/subscribe/?NameID=local&childID=21293' - + '&activityID=A10003123507&unitID=A10003123507&placeID=A10000000211' - + '&weeklyPlanning=XX1XX11&start_date=2020-08-01&end_date=2021-07-31') + resp = app.get( + '/maelis/test/subscribe/?NameID=local&childID=21293' + + '&activityID=A10003123507&unitID=A10003123507&placeID=A10000000211' + + '&weeklyPlanning=XX1XX11&start_date=2020-08-01&end_date=2021-07-31' + ) assert not resp.json['err'] - assert resp.json['data']['state'] == { - "idState" : "1", - "isWaitState" : False, - "libelle" : "Confirmé" - } + assert resp.json['data']['state'] == {"idState": "1", "isWaitState": False, "libelle": "Confirmé"} 
@mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_unsubscribe(mocked_post, mocked_get, family_service_wsdl, - connector, app): +def test_unsubscribe(mocked_post, mocked_get, family_service_wsdl, connector, app): mocked_get.return_value = mock.Mock(content=family_service_wsdl) mocked_post.side_effect = ( utils.FakedResponse( - content=get_xml_file('readFamily.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}), + content=get_xml_file('readFamily.xml'), status_code=200, headers={'Content-Type': 'text/xml'} + ), utils.FakedResponse( - content=get_xml_file('deletesubscribe.xml'), - status_code=200, - headers={'Content-Type': 'text/xml'}) + content=get_xml_file('deletesubscribe.xml'), status_code=200, headers={'Content-Type': 'text/xml'} + ), ) Link.objects.create(resource=connector, family_id='3264', name_id='local') - resp = app.get('/maelis/test/unsubscribe/?NameID=local&childID=21293' - + '&activityID=A10003121692&start_date=2020-08-01') + resp = app.get( + '/maelis/test/unsubscribe/?NameID=local&childID=21293' + + '&activityID=A10003121692&start_date=2020-08-01' + ) assert not resp.json['err'] assert resp.json['data'] is None -@pytest.mark.parametrize('parameters, nb_bus_lines', [ - ('&direction=', 2), - ('&direction=Aller', 1), - ('&direction=retour', 1), -]) +@pytest.mark.parametrize( + 'parameters, nb_bus_lines', + [ + ('&direction=', 2), + ('&direction=Aller', 1), + ('&direction=retour', 1), + ], +) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_bus_lines(mocked_post, mocked_get, parameters, nb_bus_lines, - catalog_mocked_get, catalog_mocked_post, connector, app): +def test_bus_lines( + mocked_post, mocked_get, parameters, nb_bus_lines, catalog_mocked_get, catalog_mocked_post, connector, app +): mocked_get.side_effect = catalog_mocked_get mocked_post.side_effect = catalog_mocked_post Link.objects.create(resource=connector, family_id='3264', 
name_id='local') @@ -451,16 +496,20 @@ def test_bus_lines(mocked_post, mocked_get, parameters, nb_bus_lines, assert len(resp.json['data']) == nb_bus_lines -@pytest.mark.parametrize('parameters, err_desc', [ - ('&childID=99999', 'Child not found'), - ('&queryDate=2020-02-31', 'not a valid date'), - ('&queryDate=not-a-date', 'YYYY-MM-DD expected'), - ('&direction=heaven', 'wrong value for direction'), -]) +@pytest.mark.parametrize( + 'parameters, err_desc', + [ + ('&childID=99999', 'Child not found'), + ('&queryDate=2020-02-31', 'not a valid date'), + ('&queryDate=not-a-date', 'YYYY-MM-DD expected'), + ('&direction=heaven', 'wrong value for direction'), + ], +) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_bus_lines_errors(mocked_post, mocked_get, parameters, err_desc, - catalog_mocked_get, catalog_mocked_post, connector, app): +def test_bus_lines_errors( + mocked_post, mocked_get, parameters, err_desc, catalog_mocked_get, catalog_mocked_post, connector, app +): mocked_get.side_effect = catalog_mocked_get mocked_post.side_effect = catalog_mocked_post Link.objects.create(resource=connector, family_id='3264', name_id='local') @@ -473,8 +522,7 @@ def test_bus_lines_errors(mocked_post, mocked_get, parameters, err_desc, @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_bus_stops(mocked_post, mocked_get, - catalog_mocked_get, catalog_mocked_post, connector, app): +def test_bus_stops(mocked_post, mocked_get, catalog_mocked_get, catalog_mocked_post, connector, app): mocked_get.side_effect = catalog_mocked_get mocked_post.side_effect = catalog_mocked_post Link.objects.create(resource=connector, family_id='3264', name_id='local') @@ -485,17 +533,21 @@ def test_bus_stops(mocked_post, mocked_get, assert len(resp.json['data']) == 6 -@pytest.mark.parametrize('parameters, err_desc', [ - ('&childID=99999', 'Child not found'), - ('&queryDate=2020-02-31', 'not a valid date'), - 
('&queryDate=not-a-date', 'YYYY-MM-DD expected'), - ('&busActivityID=1', 'Bus activity not found: 1'), - ('&busUnitID=2', 'Bus unit not found: 2'), -]) +@pytest.mark.parametrize( + 'parameters, err_desc', + [ + ('&childID=99999', 'Child not found'), + ('&queryDate=2020-02-31', 'not a valid date'), + ('&queryDate=not-a-date', 'YYYY-MM-DD expected'), + ('&busActivityID=1', 'Bus activity not found: 1'), + ('&busUnitID=2', 'Bus unit not found: 2'), + ], +) @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -def test_bus_stops_errors(mocked_post, mocked_get, parameters, err_desc, - catalog_mocked_get, catalog_mocked_post, connector, app): +def test_bus_stops_errors( + mocked_post, mocked_get, parameters, err_desc, catalog_mocked_get, catalog_mocked_post, connector, app +): mocked_get.side_effect = catalog_mocked_get mocked_post.side_effect = catalog_mocked_post Link.objects.create(resource=connector, family_id='3264', name_id='local') diff --git a/tests/test_manager.py b/tests/test_manager.py index f1ca2a39..76701392 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -27,13 +27,16 @@ def login(app, username='admin', password='admin'): assert resp.status_int == 302 return app + def test_homepage_redirect(app): assert app.get('/', status=302).location.endswith('/manage/') + def test_unlogged_access(app): # connect while not being logged in assert app.get('/manage/', status=302).location.endswith('/login/?next=/manage/') + def test_simple_user_access(app, simple_user): # connect while being logged as a simple user app = login(app, username='user', password='user') @@ -42,12 +45,14 @@ def test_simple_user_access(app, simple_user): assert app.get('/manage/ovh/add', status=403) assert app.get('/manage/access/', status=403) + def test_access(app, admin_user): app = login(app) resp = app.get('/manage/', status=200) assert 'Add Connector' in resp.text assert app.get('/manage/access/', status=200) + def test_add_connector(app, 
admin_user): app = login(app) resp = app.get('/manage/', status=200) @@ -68,6 +73,7 @@ def test_add_connector(app, admin_user): resp = app.get('/manage/', status=200) assert 'Test Connector' in resp.text + def test_add_connector_unique_slug(app, admin_user): app = login(app) resp = app.get('/manage/', status=200) @@ -87,6 +93,7 @@ def test_add_connector_unique_slug(app, admin_user): resp2 = resp.forms[0].submit() assert resp2.status_int == 302 + def test_visit_connectors(app, admin_user): app = login(app) resp = app.get('/manage/', status=200) @@ -94,6 +101,7 @@ def test_visit_connectors(app, admin_user): for link in re.findall('href="(/manage.*add)"', resp.text): resp = app.get(link, status=200) + def test_access_management(app, admin_user): assert ApiUser.objects.count() == 0 app = login(app) @@ -137,22 +145,27 @@ def test_menu_json(app, admin_user): def test_logs(app, admin_user): data = StringIO('1;Foo\n2;Bar\n3;Baz') - csv = CsvDataSource.objects.create(csv_file=File(data, 't.csv'), - columns_keynames='id, text', slug='test', title='a title', description='a description') + csv = CsvDataSource.objects.create( + csv_file=File(data, 't.csv'), + columns_keynames='id, text', + slug='test', + title='a title', + description='a description', + ) query = Query(slug='fooba', resource=csv, structure='array') query.projections = '\n'.join(['id:int(id)', 'text:text']) query.save() - api = ApiUser.objects.create(username='public', - fullname='public', - description='access for all', - keytype='', key='') + api = ApiUser.objects.create( + username='public', fullname='public', description='access for all', keytype='', key='' + ) obj_type = ContentType.objects.get_for_model(csv) - AccessRight.objects.create(codename='can_access', - apiuser=api, - resource_type=obj_type, - resource_pk=csv.pk, + AccessRight.objects.create( + codename='can_access', + apiuser=api, + resource_type=obj_type, + resource_pk=csv.pk, ) app = login(app) @@ -223,8 +236,12 @@ def test_logs(app, 
admin_user): assert resp.text.count('level-info') == 2 with freezegun.freeze_time('2020-10-06 14:08:12'): - log1 = ResourceLog.objects.create(appname=csv.get_connector_slug(), slug=csv.slug, levelno=50, message='hop') - log2 = ResourceLog.objects.create(appname=csv.get_connector_slug(), slug=csv.slug, levelno=42, message='hop') + log1 = ResourceLog.objects.create( + appname=csv.get_connector_slug(), slug=csv.slug, levelno=50, message='hop' + ) + log2 = ResourceLog.objects.create( + appname=csv.get_connector_slug(), slug=csv.slug, levelno=42, message='hop' + ) resp = app.get('/manage/csvdatasource/test/logs/%s/' % log1.pk) assert 'title="Critical - Oct. 6, 2020 14:08:12"' in resp.text resp = app.get('/manage/csvdatasource/test/logs/%s/' % log2.pk) @@ -237,16 +254,24 @@ def test_logs_search(app, admin_user): transaction_id = str(uuid.uuid4()) log1 = ResourceLog.objects.create( - appname=csv.get_connector_slug(), slug=csv.slug, levelno=1, message='hop', + appname=csv.get_connector_slug(), + slug=csv.slug, + levelno=1, + message='hop', transaction_id=transaction_id, - extra={'transaction_id': transaction_id, 'foo': 'plop'}) + extra={'transaction_id': transaction_id, 'foo': 'plop'}, + ) log2 = ResourceLog.objects.create( - appname=csv.get_connector_slug(), slug=csv.slug, levelno=1, message='plop', + appname=csv.get_connector_slug(), + slug=csv.slug, + levelno=1, + message='plop', transaction_id=transaction_id, - extra={'bar': 'hop'}) + extra={'bar': 'hop'}, + ) log3 = ResourceLog.objects.create( - appname=csv.get_connector_slug(), slug=csv.slug, levelno=1, message='foo', - extra={'bar': 'hop'}) + appname=csv.get_connector_slug(), slug=csv.slug, levelno=1, message='foo', extra={'bar': 'hop'} + ) resp = app.get('/manage/csvdatasource/t/logs/') assert list(resp.context['page_obj'].object_list) == [log3, log2, log1] @@ -278,8 +303,13 @@ def test_logs_search(app, admin_user): def test_logging_parameters(app, admin_user): data = StringIO('1;Foo\n2;Bar\n3;Baz') - csv = 
CsvDataSource.objects.create(csv_file=File(data, 't.csv'), - columns_keynames='id, text', slug='test', title='a title', description='a description') + csv = CsvDataSource.objects.create( + csv_file=File(data, 't.csv'), + columns_keynames='id, text', + slug='test', + title='a title', + description='a description', + ) app = login(app) resp = app.get(csv.get_absolute_url()) resp = resp.click('Logging parameters') @@ -308,7 +338,11 @@ def test_availability_parameters(app, admin_user, monkeypatch): data = StringIO('1;Foo\n2;Bar\n3;Baz') csv = CsvDataSource.objects.create( csv_file=File(data, 't.csv'), - columns_keynames='id, text', slug='test', title='a title', description='a description') + columns_keynames='id, text', + slug='test', + title='a title', + description='a description', + ) app = login(app) resp = app.get(csv.get_absolute_url()) @@ -339,9 +373,7 @@ def test_availability_parameters(app, admin_user, monkeypatch): # Connector down resource_type = ContentType.objects.get_for_model(csv) - status = ResourceStatus( - resource_type=resource_type, resource_pk=csv.pk, - status='down', message='') + status = ResourceStatus(resource_type=resource_type, resource_pk=csv.pk, status='down', message='') status.save() assert csv.down() resp = app.get(csv.get_absolute_url()) @@ -372,18 +404,23 @@ def test_availability_parameters(app, admin_user, monkeypatch): def test_jobs(app, admin_user): data = StringIO('1;Foo\n2;Bar\n3;Baz') - csv = CsvDataSource.objects.create(csv_file=File(data, 't.csv'), - columns_keynames='id, text', slug='test', title='a title', description='a description') + csv = CsvDataSource.objects.create( + csv_file=File(data, 't.csv'), + columns_keynames='id, text', + slug='test', + title='a title', + description='a description', + ) - api = ApiUser.objects.create(username='public', - fullname='public', - description='access for all', - keytype='', key='') + api = ApiUser.objects.create( + username='public', fullname='public', description='access for all', 
keytype='', key='' + ) obj_type = ContentType.objects.get_for_model(csv) - AccessRight.objects.create(codename='can_access', - apiuser=api, - resource_type=obj_type, - resource_pk=csv.pk, + AccessRight.objects.create( + codename='can_access', + apiuser=api, + resource_type=obj_type, + resource_pk=csv.pk, ) app = login(app) @@ -438,7 +475,11 @@ def test_job_restart(app, admin_user): data = StringIO('1;Foo\n2;Bar\n3;Baz') csv = CsvDataSource.objects.create( csv_file=File(data, 't.csv'), - columns_keynames='id, text', slug='test', title='a title', description='a description') + columns_keynames='id, text', + slug='test', + title='a title', + description='a description', + ) app = login(app) # unknown job @@ -476,19 +517,29 @@ def test_job_restart(app, admin_user): def test_manager_import_export(app, admin_user): data = StringIO('1;Foo\n2;Bar\n3;Baz') - csv = CsvDataSource.objects.create(csv_file=File(data, 't.csv'), - columns_keynames='id, text', slug='test', title='a title', description='a description') - csv2 = CsvDataSource.objects.create(csv_file=File(data, 't.csv'), - columns_keynames='id, text', slug='test2', title='a title', description='a description') - api = ApiUser.objects.create(username='public', - fullname='public', - description='access for all', - keytype='', key='') + csv = CsvDataSource.objects.create( + csv_file=File(data, 't.csv'), + columns_keynames='id, text', + slug='test', + title='a title', + description='a description', + ) + csv2 = CsvDataSource.objects.create( + csv_file=File(data, 't.csv'), + columns_keynames='id, text', + slug='test2', + title='a title', + description='a description', + ) + api = ApiUser.objects.create( + username='public', fullname='public', description='access for all', keytype='', key='' + ) obj_type = ContentType.objects.get_for_model(csv) - AccessRight.objects.create(codename='can_access', - apiuser=api, - resource_type=obj_type, - resource_pk=csv.pk, + AccessRight.objects.create( + codename='can_access', + 
apiuser=api, + resource_type=obj_type, + resource_pk=csv.pk, ) # export site @@ -528,15 +579,16 @@ def test_manager_import_export(app, admin_user): resp = resp.click('Export') assert resp.headers['content-type'] == 'application/json' assert resp.headers['content-disposition'] == 'attachment; filename="export_%s_%s_20200901.json"' % ( - csv.get_connector_slug(), csv.slug) + csv.get_connector_slug(), + csv.slug, + ) connector_export = resp.text # import connector csv.delete() resp = app.get('/manage/', status=200) resp = resp.click('Import') - resp.form['site_json'] = Upload('export.json', connector_export.encode('utf-8'), - 'application/json') + resp.form['site_json'] = Upload('export.json', connector_export.encode('utf-8'), 'application/json') resp = resp.form.submit().follow() assert CsvDataSource.objects.count() == 2 assert CsvDataSource.objects.filter(slug='test').exists() @@ -599,8 +651,7 @@ def test_manager_open_access_flag(app, admin_user): assert not 'open access' in resp.text obj_type = ContentType.objects.get_for_model(csv) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=csv.pk) + AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=csv.pk) resp = app.get('/manage/', status=200) assert not 'open access' in resp.text diff --git a/tests/test_mdel.py b/tests/test_mdel.py index 52d4c6fd..d5ae5a6f 100644 --- a/tests/test_mdel.py +++ b/tests/test_mdel.py @@ -61,7 +61,7 @@ def check_zip_file(zipdir, expected_files): # check files order in zip zipres = zipfile.ZipFile(zipdir, 'r') files = [f.filename for f in zipres.infolist()] - assert files[:len(expected_files)] == expected_files + assert files[: len(expected_files)] == expected_files @pytest.fixture @@ -69,11 +69,14 @@ def setup(db): return utils.setup_access_rights(MDEL.objects.create(slug='test')) -@pytest.fixture(params=[ - json_loads(get_file_from_test_base_dir('formdata_aec_naiss.json')), - 
json_loads(get_file_from_test_base_dir('formdata_aec_mariage.json')), - json_loads(get_file_from_test_base_dir('formdata_aec_deces.json')), - ], ids=['naissance', 'mariage', 'deces']) +@pytest.fixture( + params=[ + json_loads(get_file_from_test_base_dir('formdata_aec_naiss.json')), + json_loads(get_file_from_test_base_dir('formdata_aec_mariage.json')), + json_loads(get_file_from_test_base_dir('formdata_aec_deces.json')), + ], + ids=['naissance', 'mariage', 'deces'], +) def aec_payload(request): return request.param @@ -91,12 +94,11 @@ def test_message(): routing = xml.find('mdel:Header', ns).find('mdel:Routing', ns) assert routing.find('mdel:MessageId', ns).text == '77' assert routing.find('mdel:FlowType', ns).text == 'ILE-LA' - aller = xml.find('mdel:Body', ns)\ - .find('mdel:Content', ns)\ - .find('mdel:Aller', ns) + aller = xml.find('mdel:Body', ns).find('mdel:Content', ns).find('mdel:Aller', ns) assert len(list(aller)) == 3 - assert aller.find('Document').find('FichierFormulaire')\ - .find('FichierDonnees').text == '77-ILE-LA-doc-.xml' + assert ( + aller.find('Document').find('FichierFormulaire').find('FichierDonnees').text == '77-ILE-LA-doc-.xml' + ) assert aller.find('Teledemarche/IdentifiantPlateforme').text == '1' @@ -107,13 +109,11 @@ def test_description(): AttachedFile('JD', 'energy_bill', base64.b64encode(b'this is edf_mai_2016')), ] - description = Description( - 'ILE-LA', '77', '94600', attached_files=attached_files) + description = Description('ILE-LA', '77', '94600', attached_files=attached_files) xml = description.xml assert len(list(xml)) == 5 assert len(xml.findall('PieceJointe')) == 2 - assert xml.find('Document').find('FichierFormulaire')\ - .find('FichierDonnees').text == '77-ILE-LA-doc-.xml' + assert xml.find('Document').find('FichierFormulaire').find('FichierDonnees').text == '77-ILE-LA-doc-.xml' assert xml.find('Teledemarche/IdentifiantPlateforme').text == '1' @@ -148,8 +148,8 @@ def test_create_aec_demand_type(app, setup, aec_payload): doc 
= os.path.join(basedir, '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml') validate_schema(doc, AEC_XSD) check_zip_file( - basedir + '.zip', [ - 'message.xml', '15-4-EtatCivil-ent-0.xml', '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml'] + basedir + '.zip', + ['message.xml', '15-4-EtatCivil-ent-0.xml', '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml'], ) root = etree.parse(doc).getroot() @@ -174,7 +174,10 @@ def test_create_aec_demand_type(app, setup, aec_payload): assert root.find('DemandeActe/Demandeur/Telephone').text == '0122334455' assert root.find('DemandeActe/Demandeur/QualiteDemandeur/Code').text == 'Fils' assert root.find('DemandeActe/Demandeur/AdresseEtrangere/Pays').text == 'Suisse' - assert root.find('DemandeActe/Demandeur/AdresseEtrangere/Adresse').text == '3ème, Bâtiment B, 37 rue de Paris, 3800 Bern' + assert ( + root.find('DemandeActe/Demandeur/AdresseEtrangere/Adresse').text + == '3ème, Bâtiment B, 37 rue de Paris, 3800 Bern' + ) assert not root.find('DemandeActe/Demandeur/AdresseFrancaise/CodePostal') assert not root.find('DemandeActe/Demandeur/AdresseFrancaise/Ville') @@ -201,8 +204,8 @@ def test_create_aec_demand_type(app, setup, aec_payload): doc = os.path.join(basedir, '16-1-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml') validate_schema(doc, AEC_XSD) check_zip_file( - basedir + '.zip', [ - 'message.xml', '16-1-EtatCivil-ent-0.xml', '16-1-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml'] + basedir + '.zip', + ['message.xml', '16-1-EtatCivil-ent-0.xml', '16-1-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml'], ) validate_schema(doc, AEC_XSD) @@ -261,8 +264,8 @@ def test_create_aec_demand_type(app, setup, aec_payload): doc = os.path.join(basedir, '17-1-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml') validate_schema(doc, AEC_XSD) check_zip_file( - basedir + '.zip', [ - 'message.xml', '17-1-EtatCivil-ent-0.xml', '17-1-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml'] + basedir + '.zip', + ['message.xml', '17-1-EtatCivil-ent-0.xml', '17-1-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml'], ) 
validate_schema(doc, AEC_XSD) @@ -307,7 +310,9 @@ def test_create_aec_demand_type(app, setup, aec_payload): def test_create_aec_demand_with_output_sftp(app, setup, aec_payload, sftpserver): - setup.outcoming_sftp = SFTP('sftp://foo:bar@{server.host}:{server.port}/output/'.format(server=sftpserver)) + setup.outcoming_sftp = SFTP( + 'sftp://foo:bar@{server.host}:{server.port}/output/'.format(server=sftpserver) + ) setup.save() resp = app.post_json('/mdel/test/create', params=aec_payload, status=200) expected_filename = '%s-0.zip' % resp.json['data']['demand_id'] @@ -366,7 +371,10 @@ def test_create_ile_demand_type(app, setup): assert root.find('SituationElectoraleAnterieure/SituationDeLElecteur').text == 'cci' assert root.find('SituationElectoraleAnterieure/PaysUeDerniereInscription/Pays').text == 'BEL' assert root.find('SituationElectoraleAnterieure/PaysUeDerniereInscription/Localite').text == 'Bruxelles' - assert root.find('SituationElectoraleAnterieure/PaysUeDerniereInscription/DivisionTerritoriale').text == 'Whatever' + assert ( + root.find('SituationElectoraleAnterieure/PaysUeDerniereInscription/DivisionTerritoriale').text + == 'Whatever' + ) # checking that attached files are referenced in -ent-.xml file desc = os.path.join(base_doc, '1-14-ILE-LA-ent-.xml') @@ -378,7 +386,10 @@ def test_create_ile_demand_type(app, setup): assert root.find('ns2:Routage/ns2:Donnee/ns2:Valeur', namespaces=ns).text == '54395' assert root.find('ns2:Document/ns2:Code', namespaces=ns).text == '1-14-ILE-LA' assert root.find('ns2:Document/ns2:Nom', namespaces=ns).text == '1-14-ILE-LA' - assert root.find('ns2:Document/ns2:FichierFormulaire/ns2:FichierDonnees', namespaces=ns).text == '1-14-ILE-LA-doc-.xml' + assert ( + root.find('ns2:Document/ns2:FichierFormulaire/ns2:FichierDonnees', namespaces=ns).text + == '1-14-ILE-LA-doc-.xml' + ) attached_files = root.findall('ns2:PieceJointe', namespaces=ns) assert len(attached_files) == 3 @@ -395,9 +406,14 @@ def test_create_ile_demand_type(app, 
setup): assert afile.find('ns2:Code', namespaces=ns).text == 'JD' assert afile.find('ns2:Fichier', namespaces=ns).text == 'mdel_edf.pdf' - expected_files = ['message.xml', '1-14-ILE-LA-doc-.xml', - '1-14-ILE-LA-ent-.xml', 'mdel_passeport_recto.pdf', - 'mdel_passeport_verso.pdf', 'mdel_edf.pdf'] + expected_files = [ + 'message.xml', + '1-14-ILE-LA-doc-.xml', + '1-14-ILE-LA-ent-.xml', + 'mdel_passeport_recto.pdf', + 'mdel_passeport_verso.pdf', + 'mdel_edf.pdf', + ] for fname in os.listdir(base_doc): assert fname in expected_files @@ -427,7 +443,8 @@ def test_create_ile_demand_type_invalid_document_proof(app, setup): def test_get_status(app, setup): shutil.copytree( os.path.join(get_mdel_base_dir(), 'test', 'outputs'), - os.path.join(get_resource_base_dir(), 'test', 'outputs')) + os.path.join(get_resource_base_dir(), 'test', 'outputs'), + ) resp = app.post_json('/mdel/test/create', params=ILE_PAYLOAD, status=200) demand_id = resp.json['data']['demand_id'] assert demand_id == '1-14-ILE-LA' @@ -468,7 +485,8 @@ def test_get_status_unknown_demand(app, setup): def test_get_status_no_response(app, setup): shutil.copytree( os.path.join(get_mdel_base_dir(), 'test', 'outputs'), - os.path.join(get_resource_base_dir(), 'test', 'outputs')) + os.path.join(get_resource_base_dir(), 'test', 'outputs'), + ) Demand.objects.create(resource=setup, num='1-15', flow_type='ILE-LA', demand_id='1-15-ILE-LA') resp = app.get('/mdel/test/status', params={'demand_id': '1-15-ILE-LA'}, status=200) @@ -483,7 +501,8 @@ def test_get_status_no_response(app, setup): def test_get_not_closed_status(app, setup): shutil.copytree( os.path.join(get_mdel_base_dir(), 'test', 'outputs'), - os.path.join(get_resource_base_dir(), 'test', 'outputs')) + os.path.join(get_resource_base_dir(), 'test', 'outputs'), + ) Demand.objects.create(resource=setup, num='15-9', flow_type='AEC-LA', demand_id='15-9-AEC-LA') resp = app.get('/mdel/test/status', params={'demand_id': '15-9-AEC-LA'}, status=200) @@ -535,14 +554,20 @@ def 
test_data_source_certificate_types(app, setup): assert datum['text'] == 'Extrait plurilingue' # test without COPIE-INTEGRALE and EXTRAIT-AVEC-FILIATION - resp = app.get('/mdel/test/certificate-types', params={'without': 'COPIE-INTEGRALE,EXTRAIT-AVEC-FILIATION'}, status=200) + resp = app.get( + '/mdel/test/certificate-types', + params={'without': 'COPIE-INTEGRALE,EXTRAIT-AVEC-FILIATION'}, + status=200, + ) data = resp.json['data'] assert len(data) == 2 assert 'EXTRAIT-SANS-FILIATION' in (data[0]['id'], data[1]['id']) assert 'EXTRAIT-PLURILINGUE' in (data[0]['id'], data[1]['id']) + def test_date_parsing(): from passerelle.utils.jsonresponse import APIError + with pytest.raises(APIError) as error: date = parse_date('2018-02-29') assert 'day is out of range for month' in str(error) @@ -563,7 +588,7 @@ def test_aec_filenames_and_routing(app, setup): assert os.path.exists(basedir) check_zip_file( basedir + '.zip', - ['message.xml', '15-4-EtatCivil-ent-0.xml', '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml'] + ['message.xml', '15-4-EtatCivil-ent-0.xml', '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml'], ) doc = os.path.join(basedir, '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml') @@ -579,23 +604,31 @@ def test_aec_filenames_and_routing(app, setup): ns = {'ns': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier'} assert root.find('ns:Document/ns:Code', namespaces=ns).text == 'ActeEtatCivil-XML' assert root.find('ns:Document/ns:Nom', namespaces=ns).text == 'ActeEtatCivil-XML' - assert root.find('ns:Document/ns:FichierFormulaire/ns:FichierDonnees', namespaces=ns).text == \ - '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml' + assert ( + root.find('ns:Document/ns:FichierFormulaire/ns:FichierDonnees', namespaces=ns).text + == '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml' + ) message = os.path.join(basedir, 'message.xml') assert os.path.exists(message) root = letree.parse(message).getroot() ns = { 'ns1': 
'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier', - 'ns2': 'http://finances.gouv.fr/dgme/pec/message/v1' + 'ns2': 'http://finances.gouv.fr/dgme/pec/message/v1', } - assert root.find('ns2:Body/ns2:Content/ns2:Aller/ns1:NumeroDemarche', namespaces=ns).text \ - == 'EtatCivil' - assert root.find('ns2:Body/ns2:Content/ns2:Aller/ns1:Document/ns1:Code', namespaces=ns).text \ + assert root.find('ns2:Body/ns2:Content/ns2:Aller/ns1:NumeroDemarche', namespaces=ns).text == 'EtatCivil' + assert ( + root.find('ns2:Body/ns2:Content/ns2:Aller/ns1:Document/ns1:Code', namespaces=ns).text == 'ActeEtatCivil-XML' - assert root.find('ns2:Body/ns2:Content/ns2:Aller/ns1:Document/ns1:Nom', namespaces=ns).text \ + ) + assert ( + root.find('ns2:Body/ns2:Content/ns2:Aller/ns1:Document/ns1:Nom', namespaces=ns).text == 'ActeEtatCivil-XML' - assert root.find( - 'ns2:Body/ns2:Content/ns2:Aller/ns1:Document/ns1:FichierFormulaire/ns1:FichierDonnees', - namespaces=ns - ).text == '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml' + ) + assert ( + root.find( + 'ns2:Body/ns2:Content/ns2:Aller/ns1:Document/ns1:FichierFormulaire/ns1:FichierDonnees', + namespaces=ns, + ).text + == '15-4-EtatCivil-doc-ActeEtatCivil-XML-1-0.xml' + ) diff --git a/tests/test_mdel_ddpacs.py b/tests/test_mdel_ddpacs.py index 58ca7c1e..47c9f171 100644 --- a/tests/test_mdel_ddpacs.py +++ b/tests/test_mdel_ddpacs.py @@ -39,12 +39,15 @@ def build_response_zip(**kwargs): @pytest.fixture(autouse=True) def resource(db): - return utils.setup_access_rights(Resource.objects.create( - slug='test', - code_insee='66666', - recipient_siret='999999', - recipient_service='SERVICE', - recipient_guichet='GUICHET')) + return utils.setup_access_rights( + Resource.objects.create( + slug='test', + code_insee='66666', + recipient_siret='999999', + recipient_service='SERVICE', + recipient_guichet='GUICHET', + ) + ) @pytest.fixture @@ -95,8 +98,8 @@ def test_create_demand(app, resource, ddpacs_payload, freezer, sftpserver, caplo 
with sftpserver.serve_content({'input': {}, 'output': {}}): content = sftpserver.content_provider.content_object resource.outgoing_sftp = sftp.SFTP( - 'sftp://john:doe@{server.host}:{server.port}/output/'.format( - server=sftpserver)) + 'sftp://john:doe@{server.host}:{server.port}/output/'.format(server=sftpserver) + ) resource.save() resource.jobs() assert not content['output'] @@ -122,8 +125,8 @@ def test_create_demand(app, resource, ddpacs_payload, freezer, sftpserver, caplo resource.hourly() resource.incoming_sftp = sftp.SFTP( - 'sftp://john:doe@{server.host}:{server.port}/input/'.format( - server=sftpserver)) + 'sftp://john:doe@{server.host}:{server.port}/input/'.format(server=sftpserver) + ) resource.save() response_name, response_content = build_response_zip( @@ -132,19 +135,16 @@ def test_create_demand(app, resource, ddpacs_payload, freezer, sftpserver, caplo step=1, old_step=1, etat=100, - commentaire='coucou') + commentaire='coucou', + ) content['input'][response_name] = response_content resource.hourly() assert resource.demand_set.get().status == 'closed' assert response_name not in content['input'] response_name, response_content = build_response_zip( - reference='A-1-1', - flow_type='depotDossierPACS', - step=1, - old_step=1, - etat=1, - commentaire='coucou') + reference='A-1-1', flow_type='depotDossierPACS', step=1, old_step=1, etat=1, commentaire='coucou' + ) content['input'][response_name] = response_content resource.hourly() assert 'unexpected file "A-1-1-depotDossierPACS-1.zip"' in caplog.messages[-1] @@ -159,10 +159,7 @@ def test_create_demand_double(app, resource, ddpacs_payload, freezer, sftpserver freezer.move_to('2019-01-01') # Push new demand - payload = { - 'display_id': '1-1', - 'PACS/convention/conventionType/aideMaterielMontant': 'None' - } + payload = {'display_id': '1-1', 'PACS/convention/conventionType/aideMaterielMontant': 'None'} ddpacs_payload.update(payload) assert Demand.objects.count() == 0 assert resource.jobs_set().count() == 
0 diff --git a/tests/test_mdph13.py b/tests/test_mdph13.py index 74526bbf..5f9ee76f 100644 --- a/tests/test_mdph13.py +++ b/tests/test_mdph13.py @@ -39,158 +39,161 @@ EMAIL = 'john.doe@example.com' SECRET = 'secret' IP = '88.34.56.56' -VALID_RESPONSE = json.dumps({ - 'err': 0, - "data": { - "numero": FILE_NUMBER, - "beneficiaire": { - "nom": "Martini", - "prenom": "Alfonso", - "tel_mobile": "06 01 02 03 04", - "tel_fixe": "04.01.02.03.04", - "date_de_naissance": "1951-03-23", - "email": "martini.a@free.fr", - "entourage": [ +VALID_RESPONSE = json.dumps( + { + 'err': 0, + "data": { + "numero": FILE_NUMBER, + "beneficiaire": { + "nom": "Martini", + "prenom": "Alfonso", + "tel_mobile": "06 01 02 03 04", + "tel_fixe": "04.01.02.03.04", + "date_de_naissance": "1951-03-23", + "email": "martini.a@free.fr", + "entourage": [ + { + "role": "Père", + "nom": "DUPONT Henri", + "tel_mobile": "0123232323", + "tel_fixe": "0202020202", + "email": "henri.dupont@xyz.com", + }, + { + "role": "Mère", + "nom": "DUPONT Marie", + "tel_mobile": "0123232323", + "tel_fixe": "0202020202", + "email": "marie.dupont@xyz.com", + }, + { + "role": "Aidant", + "nom": "ROBERT Fanny", + "tel_mobile": "0123232323", + "tel_fixe": "0202020202", + "email": "frobert@xyz.com", + }, + ], + "adresse": { + "adresse_2": "Bliblibli", + "adresse_3": "Bliblibli", + "adresse_4": "CHEMIN DE LA CARRAIRE", + "adresse_5": "Bliblibli", + "code_postal": "13500", + "ville": "MARTIGUES", + }, + "incapacite": {"taux": "Taux >=80%", "date_fin_effet": "2019-06-30"}, + }, + "demandes": [ { - "role": "Père", - "nom": "DUPONT Henri", - "tel_mobile": "0123232323", - "tel_fixe": "0202020202", - "email": "henri.dupont@xyz.com" + "numero": "1544740", + "date_demande": "2015-11-26", + "type_demande": "Renouvellement", + "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", + "statut": "Instruction administrative terminée en attente de passage en évaluation", + "typologie": "Demande En Cours", + "date_decision": 
None, }, { - "role": "Mère", - "nom": "DUPONT Marie", - "tel_mobile": "0123232323", - "tel_fixe": "0202020202", - "email": "marie.dupont@xyz.com" + "numero": "1210524", + "date_demande": "2014-06-13", + "type_demande": "Renouvellement", + "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", + "statut": "Décision prononcée et expédition réalisée (traitement terminé)", + "typologie": "Traitée non expédiée", + "date_decision": "2014-07-10", + "date_debut_effet": "2014-08-01", + "date_fin_effet": "2016-05-01", }, { - "role": "Aidant", - "nom": "ROBERT Fanny", - "tel_mobile": "0123232323", - "tel_fixe": "0202020202", - "email": "frobert@xyz.com" - } + "numero": "1231345", + "date_demande": "2014-07-22", + "type_demande": "Recours Gracieux", + "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", + "statut": "Décision prononcée et expédition réalisée (traitement terminé)", + "typologie": "Traitée et expédiée", + "date_decision": "2014-09-17", + "date_debut_effet": "2014-08-01", + "date_fin_effet": "2016-05-01", + }, + { + "numero": "666660", + "date_demande": "2012-08-13", + "type_demande": "Recours Gracieux", + "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", + "statut": "Décision prononcée et expédition réalisée (traitement terminé)", + "typologie": "Traitée et expédiée", + "date_decision": "2012-09-26", + "date_debut_effet": "2012-07-19", + "date_fin_effet": "2014-08-01", + }, + { + "numero": "605280", + "date_demande": "2012-04-05", + "type_demande": "1ère demande", + "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", + "statut": "Décision prononcée et expédition réalisée (traitement terminé)", + "typologie": "Traitée et expédiée", + "date_decision": "2012-07-19", + "date_debut_effet": "2012-07-19", + "date_fin_effet": "2014-05-01", + }, + { + "numero": "1544741", + "date_demande": "2015-11-26", + "type_demande": "Renouvellement", + "prestation": "Carte d'invalidité (de 
priorité) pour personne handicapée", + "statut": "Décision prononcée et expédition réalisée (traitement terminé)", + "typologie": "Traitée et expédiée", + "date_decision": "2015-12-22", + "date_debut_effet": "2016-05-01", + "date_fin_effet": "2026-05-01", + }, + { + "numero": "1210526", + "date_demande": "2014-06-13", + "type_demande": "Renouvellement", + "prestation": "Carte européenne de Stationnement", + "statut": "Décision prononcée et expédition réalisée (traitement terminé)", + "typologie": "Traitée et expédiée", + "date_decision": "2014-07-04", + "date_debut_effet": "2014-05-01", + "date_fin_effet": "2015-05-01", + }, + { + "numero": "605281", + "date_demande": "2012-04-05", + "type_demande": "1ère demande", + "prestation": "Carte européenne de Stationnement", + "statut": "Décision prononcée et expédition réalisée (traitement terminé)", + "typologie": "Traitée et expédiée", + "date_decision": "2012-07-04", + "date_debut_effet": "2012-05-01", + "date_fin_effet": "2014-05-01", + }, ], - "adresse": { - "adresse_2": "Bliblibli", - "adresse_3": "Bliblibli", - "adresse_4": "CHEMIN DE LA CARRAIRE", - "adresse_5": "Bliblibli", - "code_postal": "13500", - "ville": "MARTIGUES" - }, - "incapacite": { - "taux": "Taux >=80%", - "date_fin_effet": "2019-06-30" - } }, - "demandes": [ - { - "numero": "1544740", - "date_demande": "2015-11-26", - "type_demande": "Renouvellement", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Instruction administrative terminée en attente de passage en évaluation", - "typologie": "Demande En Cours", - "date_decision": None, - }, - { - "numero": "1210524", - "date_demande": "2014-06-13", - "type_demande": "Renouvellement", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée non expédiée", - "date_decision": "2014-07-10", - "date_debut_effet": "2014-08-01", - "date_fin_effet": 
"2016-05-01" - }, - { - "numero": "1231345", - "date_demande": "2014-07-22", - "type_demande": "Recours Gracieux", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2014-09-17", - "date_debut_effet": "2014-08-01", - "date_fin_effet": "2016-05-01" - }, - { - "numero": "666660", - "date_demande": "2012-08-13", - "type_demande": "Recours Gracieux", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2012-09-26", - "date_debut_effet": "2012-07-19", - "date_fin_effet": "2014-08-01" - }, - { - "numero": "605280", - "date_demande": "2012-04-05", - "type_demande": "1ère demande", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2012-07-19", - "date_debut_effet": "2012-07-19", - "date_fin_effet": "2014-05-01" - }, - { - "numero": "1544741", - "date_demande": "2015-11-26", - "type_demande": "Renouvellement", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2015-12-22", - "date_debut_effet": "2016-05-01", - "date_fin_effet": "2026-05-01" - }, - { - "numero": "1210526", - "date_demande": "2014-06-13", - "type_demande": "Renouvellement", - "prestation": "Carte européenne de Stationnement", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2014-07-04", - "date_debut_effet": "2014-05-01", - "date_fin_effet": "2015-05-01" - }, - { - "numero": "605281", - 
"date_demande": "2012-04-05", - "type_demande": "1ère demande", - "prestation": "Carte européenne de Stationnement", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2012-07-04", - "date_debut_effet": "2012-05-01", - "date_fin_effet": "2014-05-01" - } - ] } -}) +) DOSSIER_INCONNU = { 'status_code': 404, - 'content': json.dumps({ - 'err': 1, - 'err_code': 'dossier-inconnu', - }), + 'content': json.dumps( + { + 'err': 1, + 'err_code': 'dossier-inconnu', + } + ), } SECRET_INVALIDE = { 'status_code': 404, - 'content': json.dumps({ - 'err': 1, - 'err_code': 'secret-invalide', - }), + 'content': json.dumps( + { + 'err': 1, + 'err_code': 'secret-invalide', + } + ), } @@ -201,7 +204,8 @@ def mdph13(db): title='Test 1', slug='test1', description='Connecteur de test', - webservice_base_url='http://cd13.fr/') + webservice_base_url='http://cd13.fr/', + ) @pytest.fixture @@ -265,46 +269,77 @@ def test_call_situation_dossier_with_email_and_ip(mdph13, mock_http): def test_link_bad_file_number(mdph13): with pytest.raises(APIError) as e: - mdph13.link(request=None, NameID=NAME_ID, numero_dossier='x', secret=None, - date_de_naissance=None, email=None) + mdph13.link( + request=None, NameID=NAME_ID, numero_dossier='x', secret=None, date_de_naissance=None, email=None + ) assert str(e.value) == 'numero_dossier must be a number' def test_link_bad_date_de_naissance(mdph13): with pytest.raises(APIError) as e: - mdph13.link(request=None, NameID=NAME_ID, numero_dossier=FILE_NUMBER, secret=None, - date_de_naissance='34-45-6', email=None) + mdph13.link( + request=None, + NameID=NAME_ID, + numero_dossier=FILE_NUMBER, + secret=None, + date_de_naissance='34-45-6', + email=None, + ) assert str(e.value) == 'date_de_naissance must be a date YYYY-MM-DD' def test_link_bad_email(mdph13): with pytest.raises(APIError) as e: - mdph13.link(request=None, NameID=NAME_ID, numero_dossier=FILE_NUMBER, secret=None, - 
date_de_naissance=DOB_ISOFORMAT, email='xxx@@vvv') + mdph13.link( + request=None, + NameID=NAME_ID, + numero_dossier=FILE_NUMBER, + secret=None, + date_de_naissance=DOB_ISOFORMAT, + email='xxx@@vvv', + ) assert str(e.value) == 'email is not valid' def test_link_nok_dossier_inconnu(mdph13, mock_http): mock_http.add_response(DOSSIER_INCONNU) with pytest.raises(APIError) as e: - mdph13.link(request=None, NameID=NAME_ID, numero_dossier=FILE_NUMBER, secret=SECRET, - date_de_naissance=DOB_ISOFORMAT, email=EMAIL) + mdph13.link( + request=None, + NameID=NAME_ID, + numero_dossier=FILE_NUMBER, + secret=SECRET, + date_de_naissance=DOB_ISOFORMAT, + email=EMAIL, + ) assert str(e.value) == 'dossier-inconnu' def test_link_nok_secret_invalide(mdph13, mock_http): mock_http.add_response(SECRET_INVALIDE) with pytest.raises(APIError) as e: - mdph13.link(request=None, NameID=NAME_ID, numero_dossier=FILE_NUMBER, secret=SECRET, - date_de_naissance=DOB_ISOFORMAT, email=EMAIL) + mdph13.link( + request=None, + NameID=NAME_ID, + numero_dossier=FILE_NUMBER, + secret=SECRET, + date_de_naissance=DOB_ISOFORMAT, + email=EMAIL, + ) assert str(e.value) == 'secret-invalide' def test_link_numero_dont_match(mdph13, mock_http): mock_http.add_response(json.dumps({'err': 0, 'data': {'numero': '456'}})) with pytest.raises(APIError) as e: - mdph13.link(request=None, NameID=NAME_ID, numero_dossier=FILE_NUMBER, secret=SECRET, - date_de_naissance=DOB_ISOFORMAT, email=EMAIL) + mdph13.link( + request=None, + NameID=NAME_ID, + numero_dossier=FILE_NUMBER, + secret=SECRET, + date_de_naissance=DOB_ISOFORMAT, + email=EMAIL, + ) assert str(e.value) == 'numero-must-match-numero-dossier' @@ -312,20 +347,27 @@ def test_link_ok(mdph13, mock_http): # check first time link mock_http.add_response(VALID_RESPONSE) assert not Link.objects.count() - response = mdph13.link(request=None, NameID=NAME_ID, - numero_dossier=FILE_NUMBER, secret=SECRET, - date_de_naissance=DOB_ISOFORMAT, email=EMAIL, ip=IP) + response = mdph13.link( + 
request=None, + NameID=NAME_ID, + numero_dossier=FILE_NUMBER, + secret=SECRET, + date_de_naissance=DOB_ISOFORMAT, + email=EMAIL, + ip=IP, + ) link = Link.objects.get() - assert response == { - 'link_id': link.pk, - 'created': True, - 'updated': False - } + assert response == {'link_id': link.pk, 'created': True, 'updated': False} # check relinking with update mock_http.add_response(VALID_RESPONSE) - response = mdph13.link(request=None, NameID=NAME_ID, - numero_dossier=FILE_NUMBER, secret=SECRET + 'a', - date_de_naissance=DOB_ISOFORMAT, email=EMAIL) + response = mdph13.link( + request=None, + NameID=NAME_ID, + numero_dossier=FILE_NUMBER, + secret=SECRET + 'a', + date_de_naissance=DOB_ISOFORMAT, + email=EMAIL, + ) assert response == { 'link_id': link.pk, 'created': False, @@ -341,11 +383,8 @@ def test_unlink_nok_bad_link_id(mdph13): def test_unlink_ok(mdph13): link = Link.objects.create( - resource=mdph13, - name_id=NAME_ID, - file_number=FILE_NUMBER, - secret=SECRET, - dob=DOB) + resource=mdph13, name_id=NAME_ID, file_number=FILE_NUMBER, secret=SECRET, dob=DOB + ) result = mdph13.unlink(None, NAME_ID, str(link.pk)) assert result['deleted'] == 1 result = mdph13.unlink(None, NAME_ID, str(link.pk)) @@ -354,28 +393,15 @@ def test_unlink_ok(mdph13): def test_unlink_with_delete(mdph13, app): link = Link.objects.create( - resource=mdph13, - name_id=NAME_ID, - file_number=FILE_NUMBER, - secret=SECRET, - dob=DOB) + resource=mdph13, name_id=NAME_ID, file_number=FILE_NUMBER, secret=SECRET, dob=DOB + ) response = app.delete('/mdph13/%s/unlink/?NameID=%s&link_id=%s' % (mdph13.slug, NAME_ID, link.pk)) assert response.json['deleted'] == 1 def test_unlink_all_ok(mdph13): - Link.objects.create( - resource=mdph13, - name_id=NAME_ID, - file_number=FILE_NUMBER, - secret=SECRET, - dob=DOB) - Link.objects.create( - resource=mdph13, - name_id=NAME_ID, - file_number='12345', - secret=SECRET, - dob=DOB) + Link.objects.create(resource=mdph13, name_id=NAME_ID, file_number=FILE_NUMBER, 
secret=SECRET, dob=DOB) + Link.objects.create(resource=mdph13, name_id=NAME_ID, file_number='12345', secret=SECRET, dob=DOB) result = mdph13.unlink(None, NAME_ID, 'all') assert result['deleted'] == 2 result = mdph13.unlink(None, NAME_ID, 'all') @@ -384,11 +410,8 @@ def test_unlink_all_ok(mdph13): def test_dossier_ok(mdph13, mock_http): link = Link.objects.create( - resource=mdph13, - name_id=NAME_ID, - file_number=FILE_NUMBER, - secret=SECRET, - dob=DOB) + resource=mdph13, name_id=NAME_ID, file_number=FILE_NUMBER, secret=SECRET, dob=DOB + ) mock_http.add_response(VALID_RESPONSE) response = mdph13.dossiers(None, NAME_ID, EMAIL, ip=IP) assert response['data'] @@ -420,11 +443,8 @@ def test_dossier_ok(mdph13, mock_http): def test_dossier_with_link_id_ok(mdph13, mock_http): link = Link.objects.create( - resource=mdph13, - name_id=NAME_ID, - file_number=FILE_NUMBER, - secret=SECRET, - dob=DOB) + resource=mdph13, name_id=NAME_ID, file_number=FILE_NUMBER, secret=SECRET, dob=DOB + ) mock_http.add_response(VALID_RESPONSE) response = mdph13.dossiers(None, NAME_ID, EMAIL, link_id=str(link.pk), ip=IP) assert response['data'] @@ -443,17 +463,11 @@ def test_dossier_with_link_id_ok(mdph13, mock_http): def test_dossier_partial_failure(mdph13, mock_http): link1 = Link.objects.create( - resource=mdph13, - name_id=NAME_ID, - file_number=FILE_NUMBER, - secret=SECRET, - dob=DOB) + resource=mdph13, name_id=NAME_ID, file_number=FILE_NUMBER, secret=SECRET, dob=DOB + ) link2 = Link.objects.create( - resource=mdph13, - name_id=NAME_ID, - file_number=FILE_NUMBER + '2', - secret=SECRET, - dob=DOB) + resource=mdph13, name_id=NAME_ID, file_number=FILE_NUMBER + '2', secret=SECRET, dob=DOB + ) mock_http.add_response(VALID_RESPONSE) VALID_RESPONSE2 = json.loads(VALID_RESPONSE).copy() VALID_RESPONSE2['data']['numero'] = FILE_NUMBER + '2' @@ -479,11 +493,8 @@ def test_dossier_partial_failure(mdph13, mock_http): def test_dossier_bad_date(mdph13, mock_http): link = Link.objects.create( - 
resource=mdph13, - name_id=NAME_ID, - file_number=FILE_NUMBER, - secret=SECRET, - dob=DOB) + resource=mdph13, name_id=NAME_ID, file_number=FILE_NUMBER, secret=SECRET, dob=DOB + ) INVALID_RESPONSE = json.loads(VALID_RESPONSE) INVALID_RESPONSE['data']['demandes'][0]['date_demande'] = 'xxx' mock_http.add_response(json.dumps(INVALID_RESPONSE)) diff --git a/tests/test_misc.py b/tests/test_misc.py index 034457ff..91eb8c66 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -62,9 +62,13 @@ def test_log_cleaning(app, db, admin_user, settings): connector.logger.error('hello3') assert ResourceLog.objects.all().count() == 3 - url = reverse('logging-parameters', kwargs={ - 'resource_type': ContentType.objects.get_for_model(connector).id, - 'resource_pk': connector.id}) + url = reverse( + 'logging-parameters', + kwargs={ + 'resource_type': ContentType.objects.get_for_model(connector).id, + 'resource_pk': connector.id, + }, + ) app = login(app) resp = app.get(url) assert not resp.html.find('input', {'name': 'log_retention_days'}).has_attr('value') @@ -80,6 +84,7 @@ def test_log_cleaning(app, db, admin_user, settings): connector.daily() assert ResourceLog.objects.all().count() == 1 + @pytest.fixture def email_handler(): import logging @@ -105,24 +110,28 @@ def test_trace_emails(app, settings, dummy_csv_datasource, email_handler, mailou assert not mailoutbox - with patch.object(dummy_csv_datasource.__class__, - 'execute_query', - side_effect=ValueError('coin'), autospec=True): + with patch.object( + dummy_csv_datasource.__class__, 'execute_query', side_effect=ValueError('coin'), autospec=True + ): - app.get(generic_endpoint_url(connector='csvdatasource', - endpoint='query/dummy-query/', - slug=dummy_csv_datasource.slug), - status=500) + app.get( + generic_endpoint_url( + connector='csvdatasource', endpoint='query/dummy-query/', slug=dummy_csv_datasource.slug + ), + status=500, + ) assert mailoutbox[0].to == ['admin@example.net'] idx = len(mailoutbox) 
logging_parameters.trace_emails = 'john.doe@example.net' logging_parameters.save() - app.get(generic_endpoint_url(connector='csvdatasource', - endpoint='query/dummy-query/', - slug=dummy_csv_datasource.slug), - status=500) + app.get( + generic_endpoint_url( + connector='csvdatasource', endpoint='query/dummy-query/', slug=dummy_csv_datasource.slug + ), + status=500, + ) assert mailoutbox[0].to == ['admin@example.net'] assert mailoutbox[idx].to == ['john.doe@example.net'] @@ -140,15 +149,15 @@ def test_jsonb_migration(transactional_db): data = {'data': {'test': 1}} CsvDataSource = old_apps.get_model(app, 'CsvDataSource') - connector = CsvDataSource.objects.create(csv_file=File(StringIO(''), 't.csv'), - _dialect_options=data) + connector = CsvDataSource.objects.create(csv_file=File(StringIO(''), 't.csv'), _dialect_options=data) pk = connector.pk field = CsvDataSource._meta.get_field('_dialect_options') with connection.cursor() as cursor: cursor.execute( - 'ALTER TABLE {table} ALTER COLUMN {col} TYPE text USING {col}::text;' - .format(table=CsvDataSource._meta.db_table, col=field.get_attname_column()[1]) + 'ALTER TABLE {table} ALTER COLUMN {col} TYPE text USING {col}::text;'.format( + table=CsvDataSource._meta.db_table, col=field.get_attname_column()[1] + ) ) connector = CsvDataSource.objects.get(pk=pk) # db is in a broken state diff --git a/tests/test_nancypoll.py b/tests/test_nancypoll.py index 7b7d8468..566900e0 100644 --- a/tests/test_nancypoll.py +++ b/tests/test_nancypoll.py @@ -37,15 +37,15 @@ def parse_response(response): @pytest.fixture def setup(db): - api = ApiUser.objects.create(username='all', - keytype='', key='') - poll = NancyPoll.objects.create(csv_file=File(StringIO(data), 'data.csv'), - slug='poll') + api = ApiUser.objects.create(username='all', keytype='', key='') + poll = NancyPoll.objects.create(csv_file=File(StringIO(data), 'data.csv'), slug='poll') obj_type = ContentType.objects.get_for_model(poll) - 
AccessRight.objects.create(codename='can_access', apiuser=api, - resource_type=obj_type, resource_pk=poll.pk) - url = reverse('generic-endpoint', kwargs={ - 'connector': 'nancypoll', 'slug': poll.slug, 'endpoint': 'data'}) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=poll.pk + ) + url = reverse( + 'generic-endpoint', kwargs={'connector': 'nancypoll', 'slug': poll.slug, 'endpoint': 'data'} + ) return url, Client() diff --git a/tests/test_okina.py b/tests/test_okina.py index 8ca01d02..df449ca6 100644 --- a/tests/test_okina.py +++ b/tests/test_okina.py @@ -1639,20 +1639,19 @@ QRCODE_404 = '''{ "message" : "Subscriber with ID 123 not found." }''' + @pytest.fixture def okina(db): - return Okina.objects.create(slug='test', - service_url='https://okina.example.net/b2b/', - username='usertest', - password='userpass') + return Okina.objects.create( + slug='test', service_url='https://okina.example.net/b2b/', username='usertest', password='userpass' + ) def test_okina_cities(app, okina): endpoint = utils.generic_endpoint_url('okina', 'cities', slug=okina.slug) assert endpoint == '/okina/test/cities' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=CITIES, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=CITIES, status_code=200) resp = app.get(endpoint, status=200) assert requests_get.call_count == 1 assert requests_get.call_args[0][0] == 'https://okina.example.net/b2b/cities' @@ -1665,8 +1664,7 @@ def test_okina_cities(app, okina): assert resp.json['data'][0]['text'] == 'ARDENTES (36120)' assert resp.json['data'][0]['lat'] is None - requests_get.return_value = utils.FakedResponse(content=CITIES_NEW, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=CITIES_NEW, status_code=200) resp = app.get(endpoint, status=200) assert requests_get.call_count == 2 assert 'data' in resp.json @@ -1677,12 
+1675,12 @@ def test_okina_cities(app, okina): assert resp.json['data'][0]['text'] == 'ARDENTES (36120)' assert resp.json['data'][0]['lat'] is None + def test_okina_classes(app, okina): endpoint = utils.generic_endpoint_url('okina', 'classes', slug=okina.slug) assert endpoint == '/okina/test/classes' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content=CLASSES, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=CLASSES, status_code=200) resp = app.get(endpoint, status=200) assert requests_get.call_count == 1 assert requests_get.call_args[0][0] == 'https://okina.example.net/b2b/classes' @@ -1691,6 +1689,7 @@ def test_okina_classes(app, okina): assert resp.json['data'][0]['id'] == '1' assert resp.json['data'][0]['text'] == u'École maternelle - Petite section' + def test_okina_institutions(app, okina): endpoint = utils.generic_endpoint_url('okina', 'institutions', slug=okina.slug) assert endpoint == '/okina/test/institutions' @@ -1707,19 +1706,25 @@ def test_okina_institutions(app, okina): assert resp.json['data'][0]['lon'] == '1.6926355997775921' resp = app.get(endpoint, params={'insee': '36005'}, status=200) - assert requests_get.call_args[0][0] == 'https://okina.example.net/b2b/institutions?inseeCode=36005' + assert ( + requests_get.call_args[0][0] == 'https://okina.example.net/b2b/institutions?inseeCode=36005' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 2 assert resp.json['data'][0]['id'] == '277' assert resp.json['data'][0]['text'] == u'Collège Touvent' resp = app.get(endpoint + '/from-city/36005/', status=200) - assert requests_get.call_args[0][0] == 'https://okina.example.net/b2b/institutions/subscriberCity/36005' + assert ( + requests_get.call_args[0][0] + == 'https://okina.example.net/b2b/institutions/subscriberCity/36005' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 2 assert resp.json['data'][0]['id'] == '277' assert 
resp.json['data'][0]['text'] == u'Collège Touvent' + def test_okina_search(app, okina): endpoint = utils.generic_endpoint_url('okina', 'search', slug=okina.slug) assert endpoint == '/okina/test/search' @@ -1731,8 +1736,9 @@ def test_okina_search(app, okina): apiuser = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(okina) - AccessRight.objects.create(codename='can_access', apiuser=apiuser, resource_type=obj_type, - resource_pk=okina.pk) + AccessRight.objects.create( + codename='can_access', apiuser=apiuser, resource_type=obj_type, resource_pk=okina.pk + ) app.get(endpoint + '?lat=46.828652', status=400) # missing argument resp = app.get(endpoint + '?lat=46.828652&lon=1.701463&institution=277', status=200) @@ -1742,7 +1748,7 @@ def test_okina_search(app, okina): "from-address": "", "from-lat": "46.828652", "from-long": "1.701463", - "institution-id": "277" + "institution-id": "277", } assert resp.json['err'] == 0 assert len(resp.json['data']) == 2 @@ -1751,31 +1757,36 @@ def test_okina_search(app, okina): assert resp.json['data'][0]['lat'] == 46.8444186 assert resp.json['data'][0]['lon'] == 1.708197 # french decimals - resp = app.get(endpoint + '?lat=46,8&lon=1,71&address=nowhere&mode=FAR_ALL&institution=280', - status=200) + resp = app.get( + endpoint + '?lat=46,8&lon=1,71&address=nowhere&mode=FAR_ALL&institution=280', status=200 + ) assert requests_post.call_args[0][0] == 'https://okina.example.net/b2b/wishes/search' assert json.loads(requests_post.call_args[1]['data']) == { "type": "FAR_ALL", "from-address": "nowhere", "from-lat": "46.8", "from-long": "1.71", - "institution-id": "280" + "institution-id": "280", } assert resp.json['err'] == 0 + def test_okina_stops_area(app, okina): endpoint = utils.generic_endpoint_url('okina', 'stop-areas', slug=okina.slug) assert endpoint == '/okina/test/stop-areas' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = 
utils.FakedResponse(content=STOPS, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=STOPS, status_code=200) resp = app.get(endpoint + '/from-city/36005/to-institution/276/', status=200) - assert requests_get.call_args[0][0] == 'https://okina.example.net/b2b/stop-areas/subscriberCity/36005/institution/276' + assert ( + requests_get.call_args[0][0] + == 'https://okina.example.net/b2b/stop-areas/subscriberCity/36005/institution/276' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 3 assert resp.json['data'][0]['id'] == '3281' assert resp.json['data'][0]['text'] == u'Les Loges de Dressais' + def test_okina_ods(app, okina): endpoint = utils.generic_endpoint_url('okina', 'origin-destinations', slug=okina.slug) assert endpoint == '/okina/test/origin-destinations' @@ -1803,7 +1814,9 @@ def test_okina_ods(app, okina): requests_get.return_value = utils.FakedResponse(content=ODS_LINES, status_code=200) resp = app.get(endpoint + '/from-stop-area/3282/to-institution/276/', status=200) - assert requests_get.call_args[0][0] == 'https://okina.example.net/b2b/ods/institution/276/stop-area/3282' + assert ( + requests_get.call_args[0][0] == 'https://okina.example.net/b2b/ods/institution/276/stop-area/3282' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 3 assert resp.json['data'][0]['id'] == '1' @@ -1826,11 +1839,16 @@ def test_okina_ods(app, okina): requests_get.return_value = utils.FakedResponse(content=ODS_FULL, status_code=200) resp = app.get(endpoint + '/from-city/36005/to-institution/276/', status=200) - assert requests_get.call_args[0][0] == 'https://okina.example.net/b2b/ods/institution/276/subscriberCity/36005' + assert ( + requests_get.call_args[0][0] + == 'https://okina.example.net/b2b/ods/institution/276/subscriberCity/36005' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 1 assert resp.json['data'][0]['id'] == 'inst:276-seq:1-6-84' - assert resp.json['data'][0]['text'] == u'Brenne 1 vers 
écoles Antoine Fée, Saint Martin et Saint Vincent' + assert ( + resp.json['data'][0]['text'] == u'Brenne 1 vers écoles Antoine Fée, Saint Martin et Saint Vincent' + ) assert resp.json['data'][0]['identifier'] == 'SYNDICAT ARDENTES 4 (semaine Aller)' assert resp.json['data'][0]['vehicle_journey_id'] == '84' @@ -1840,10 +1858,13 @@ def test_okina_ods(app, okina): assert resp.json['err'] == 0 assert len(resp.json['data']) == 1 assert resp.json['data'][0]['id'] == 'inst:276-seq:1-6-84' - assert resp.json['data'][0]['text'] == u'Brenne 1 vers écoles Antoine Fée, Saint Martin et Saint Vincent' + assert ( + resp.json['data'][0]['text'] == u'Brenne 1 vers écoles Antoine Fée, Saint Martin et Saint Vincent' + ) assert resp.json['data'][0]['identifier'] == 'SYNDICAT ARDENTES 4 (semaine Aller)' assert resp.json['data'][0]['vehicle_journey_id'] == '84' + def test_okina_topology(app, okina): endpoint = utils.generic_endpoint_url('okina', 'topology', slug=okina.slug) assert endpoint == '/okina/test/topology' @@ -1872,19 +1893,20 @@ def test_okina_topology(app, okina): assert resp.json['data'][0]['id'] == '306' assert resp.json['data'][0]['text'] == u'019 02 - VELLES LYCEES DE CHATEAUROUX' + def test_okina_errors(app, okina): with mock.patch('passerelle.utils.Request.get') as requests_get: - for response in ((200, '{"foo": "bar"}'), # not a list - (200, '{["foo": "bar"]}'), # list of malformatted dict - (200, '

      Welcome

      '), # bad json - (403, '

      Bad creds

      '), - (404, '

      Not Found

      '), - (400, '{"message": "bad request"}'), - (400, '[{"message": "bad request"}]'), - (500, '

      Crash

      '), - ): - requests_get.return_value = utils.FakedResponse(content=response[1], - status_code=response[0]) + for response in ( + (200, '{"foo": "bar"}'), # not a list + (200, '{["foo": "bar"]}'), # list of malformatted dict + (200, '

      Welcome

      '), # bad json + (403, '

      Bad creds

      '), + (404, '

      Not Found

      '), + (400, '{"message": "bad request"}'), + (400, '[{"message": "bad request"}]'), + (500, '

      Crash

      '), + ): + requests_get.return_value = utils.FakedResponse(content=response[1], status_code=response[0]) resp = app.get('/okina/test/cities', status=200) assert resp.json['err'] == 1 if resp.json['data'] and 'status_code' in resp.json['data']: @@ -1892,12 +1914,17 @@ def test_okina_errors(app, okina): # "normal" 401/403 response, ie problem with login/password for status_code in (401, 403): - requests_get.return_value = utils.FakedResponse(content='''{"message": "Invalid credentials", - "code": 4, "status" : %d}''' % status_code, status_code=status_code) + requests_get.return_value = utils.FakedResponse( + content='''{"message": "Invalid credentials", + "code": 4, "status" : %d}''' + % status_code, + status_code=status_code, + ) resp = app.get('/okina/test/cities', status=200) assert resp.json['err'] == 1 assert resp.json['err_desc'].startswith("Invalid credentials") + def test_okina_suscribe(app, okina): for service in ('subscriber', 'subscription'): @@ -1911,8 +1938,9 @@ def test_okina_suscribe(app, okina): # open access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(okina) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=okina.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=okina.pk + ) for service in ('subscriber', 'subscription'): endpoint = utils.generic_endpoint_url('okina', service, slug=okina.slug) assert endpoint == '/okina/test/%s' % service @@ -1930,7 +1958,7 @@ def test_okina_suscribe(app, okina): assert resp.json['err_desc'] == 'payload must be a dict' requests_get.return_value = utils.FakedResponse(content='{"foo":"bar"}', status_code=200) - resp = app.post_json(endpoint, params={'x':'y'}) + resp = app.post_json(endpoint, params={'x': 'y'}) assert requests_get.call_count == 1 assert resp.json['err'] == 0 assert resp.json['data'] == {'foo': 'bar'} @@ -1950,25 +1978,27 @@ 
def test_okina_suscribe(app, okina): # qrcode endpoint = '/okina/test/subscriber/123/qrcode' with mock.patch('passerelle.utils.Request.get') as requests_get: - requests_get.return_value = utils.FakedResponse(content='PNGfoo', - headers={'Content-Type': 'image/png'}, - status_code=200) + requests_get.return_value = utils.FakedResponse( + content='PNGfoo', headers={'Content-Type': 'image/png'}, status_code=200 + ) resp = app.get(endpoint) assert requests_get.call_count == 1 assert resp.headers['content-type'] == 'image/png' assert resp.content == b'PNGfoo' - requests_get.return_value = utils.FakedResponse(content=QRCODE_400, - headers={'Content-Type': 'application/json' }, - status_code=400) + requests_get.return_value = utils.FakedResponse( + content=QRCODE_400, headers={'Content-Type': 'application/json'}, status_code=400 + ) resp = app.get(endpoint, status=200) assert requests_get.call_count == 2 assert resp.json['err'] == 1 - assert resp.json['err_desc'].startswith('Subscriber with ID 123 has no passcard number to generate qr code from.') + assert resp.json['err_desc'].startswith( + 'Subscriber with ID 123 has no passcard number to generate qr code from.' 
+ ) - requests_get.return_value = utils.FakedResponse(content=QRCODE_404, - headers={'Content-Type': 'application/json' }, - status_code=404) + requests_get.return_value = utils.FakedResponse( + content=QRCODE_404, headers={'Content-Type': 'application/json'}, status_code=404 + ) resp = app.get(endpoint, status=200) assert requests_get.call_count == 3 assert resp.json['err'] == 1 diff --git a/tests/test_opendatasoft.py b/tests/test_opendatasoft.py index 308caea8..1f435235 100644 --- a/tests/test_opendatasoft.py +++ b/tests/test_opendatasoft.py @@ -31,139 +31,109 @@ from test_manager import login pytestmark = pytest.mark.django_db -FAKED_CONTENT_Q_SEARCH = json.dumps({ - "nhits": 76, - "parameters": { - "dataset": "referentiel-adresse-test", - "format": "json", - "q": "rue de l'aubepine", - "rows": 3, - "timezone": "UTC" - }, - "records": [ - { - "datasetid": "referentiel-adresse-test", - "fields": { - "adresse_complete": "33 RUE DE L'AUBEPINE STRASBOURG", - "date_exprt": "2019-10-23", - "geo_point": [ - 48.6060963542, - 7.76978279836 - ], - "nom_commun": "Strasbourg", - "nom_rue": "RUE DE L'AUBEPINE", - "num_com": 482, - "numero": "33", - "source": u"Ville et Eurométropole de Strasbourg" - }, - "geometry": { - "coordinates": [ - 7.76978279836, - 48.6060963542 - ], - "type": "Point" - }, - "record_timestamp": "2019-12-02T14:15:08.376000+00:00", - "recordid": "e00cf6161e52a4c8fe510b2b74d4952036cb3473" +FAKED_CONTENT_Q_SEARCH = json.dumps( + { + "nhits": 76, + "parameters": { + "dataset": "referentiel-adresse-test", + "format": "json", + "q": "rue de l'aubepine", + "rows": 3, + "timezone": "UTC", }, - { - "datasetid": "referentiel-adresse-test", - "fields": { - "adresse_complete": "19 RUE DE L'AUBEPINE LIPSHEIM", - "date_exprt": "2019-10-23", - "geo_point": [ - 48.4920620548, - 7.66177412454 - ], - "nom_commun": "Lipsheim", - "nom_rue": "RUE DE L'AUBEPINE", - "num_com": 268, - "numero": "19", - "source": u"Ville et Eurométropole de Strasbourg" + "records": [ + { + 
"datasetid": "referentiel-adresse-test", + "fields": { + "adresse_complete": "33 RUE DE L'AUBEPINE STRASBOURG", + "date_exprt": "2019-10-23", + "geo_point": [48.6060963542, 7.76978279836], + "nom_commun": "Strasbourg", + "nom_rue": "RUE DE L'AUBEPINE", + "num_com": 482, + "numero": "33", + "source": u"Ville et Eurométropole de Strasbourg", + }, + "geometry": {"coordinates": [7.76978279836, 48.6060963542], "type": "Point"}, + "record_timestamp": "2019-12-02T14:15:08.376000+00:00", + "recordid": "e00cf6161e52a4c8fe510b2b74d4952036cb3473", }, - "geometry": { - "coordinates": [ - 7.66177412454, - 48.4920620548 - ], - "type": "Point" + { + "datasetid": "referentiel-adresse-test", + "fields": { + "adresse_complete": "19 RUE DE L'AUBEPINE LIPSHEIM", + "date_exprt": "2019-10-23", + "geo_point": [48.4920620548, 7.66177412454], + "nom_commun": "Lipsheim", + "nom_rue": "RUE DE L'AUBEPINE", + "num_com": 268, + "numero": "19", + "source": u"Ville et Eurométropole de Strasbourg", + }, + "geometry": {"coordinates": [7.66177412454, 48.4920620548], "type": "Point"}, + "record_timestamp": "2019-12-02T14:15:08.376000+00:00", + "recordid": "7cafcd5c692773e8b863587b2d38d6be82e023d8", }, - "record_timestamp": "2019-12-02T14:15:08.376000+00:00", - "recordid": "7cafcd5c692773e8b863587b2d38d6be82e023d8" - }, - { - "datasetid": "referentiel-adresse-test", - "fields": { - "adresse_complete": "29 RUE DE L'AUBEPINE STRASBOURG", - "date_exprt": "2019-10-23", - "geo_point": [ - 48.6056497224, - 7.76988497729 - ], - "nom_commun": "Strasbourg", - "nom_rue": "RUE DE L'AUBEPINE", - "num_com": 482, - "numero": "29", - "source": u"Ville et Eurométropole de Strasbourg" + { + "datasetid": "referentiel-adresse-test", + "fields": { + "adresse_complete": "29 RUE DE L'AUBEPINE STRASBOURG", + "date_exprt": "2019-10-23", + "geo_point": [48.6056497224, 7.76988497729], + "nom_commun": "Strasbourg", + "nom_rue": "RUE DE L'AUBEPINE", + "num_com": 482, + "numero": "29", + "source": u"Ville et Eurométropole de 
Strasbourg", + }, + "geometry": {"coordinates": [7.76988497729, 48.6056497224], "type": "Point"}, + "record_timestamp": "2019-12-02T14:15:08.376000+00:00", + "recordid": "0984a5e1745701f71c91af73ce764e1f7132e0ff", }, - "geometry": { - "coordinates": [ - 7.76988497729, - 48.6056497224 - ], - "type": "Point" - }, - "record_timestamp": "2019-12-02T14:15:08.376000+00:00", - "recordid": "0984a5e1745701f71c91af73ce764e1f7132e0ff" - } - ] -}) + ], + } +) -FAKED_CONTENT_ID_SEARCH = json.dumps({ - "nhits": 1, - "parameters": { - "dataset": "referentiel-adresse-test", - "format": "json", - "q": "recordid:7cafcd5c692773e8b863587b2d38d6be82e023d8", - "rows": 1, - "timezone": "UTC" - }, - "records": [ - { - "datasetid": "referentiel-adresse-test", - "fields": { - "adresse_complete": "19 RUE DE L'AUBEPINE LIPSHEIM", - "date_exprt": "2019-10-23", - "geo_point": [ - 48.4920620548, - 7.66177412454 - ], - "nom_commun": "Lipsheim", - "nom_rue": "RUE DE L'AUBEPINE", - "num_com": 268, - "numero": "19", - u"source": "Ville et Eurométropole de Strasbourg" - }, - "geometry": { - "coordinates": [ - 7.66177412454, - 48.4920620548 - ], - "type": "Point" - }, - "record_timestamp": "2019-12-02T14:15:08.376000+00:00", - "recordid": "7cafcd5c692773e8b863587b2d38d6be82e023d8" - } - ] -}) +FAKED_CONTENT_ID_SEARCH = json.dumps( + { + "nhits": 1, + "parameters": { + "dataset": "referentiel-adresse-test", + "format": "json", + "q": "recordid:7cafcd5c692773e8b863587b2d38d6be82e023d8", + "rows": 1, + "timezone": "UTC", + }, + "records": [ + { + "datasetid": "referentiel-adresse-test", + "fields": { + "adresse_complete": "19 RUE DE L'AUBEPINE LIPSHEIM", + "date_exprt": "2019-10-23", + "geo_point": [48.4920620548, 7.66177412454], + "nom_commun": "Lipsheim", + "nom_rue": "RUE DE L'AUBEPINE", + "num_com": 268, + "numero": "19", + u"source": "Ville et Eurométropole de Strasbourg", + }, + "geometry": {"coordinates": [7.66177412454, 48.4920620548], "type": "Point"}, + "record_timestamp": 
"2019-12-02T14:15:08.376000+00:00", + "recordid": "7cafcd5c692773e8b863587b2d38d6be82e023d8", + } + ], + } +) @pytest.fixture def connector(): - return utils.setup_access_rights(OpenDataSoft.objects.create( - slug='my_connector', - api_key='my_secret', - )) + return utils.setup_access_rights( + OpenDataSoft.objects.create( + slug='my_connector', + api_key='my_secret', + ) + ) @pytest.fixture @@ -231,14 +201,16 @@ def test_search_using_q(mocked_get, app, connector): assert len(resp.json['data']) == 3 # check order is kept assert [x['id'] for x in resp.json['data']] == [ - 'e00cf6161e52a4c8fe510b2b74d4952036cb3473', - '7cafcd5c692773e8b863587b2d38d6be82e023d8', - '0984a5e1745701f71c91af73ce764e1f7132e0ff'] + 'e00cf6161e52a4c8fe510b2b74d4952036cb3473', + '7cafcd5c692773e8b863587b2d38d6be82e023d8', + '0984a5e1745701f71c91af73ce764e1f7132e0ff', + ] # check text results assert [x['text'] for x in resp.json['data']] == [ - "33 RUE DE L'AUBEPINE Strasbourg", - "19 RUE DE L'AUBEPINE Lipsheim", - "29 RUE DE L'AUBEPINE Strasbourg"] + "33 RUE DE L'AUBEPINE Strasbourg", + "19 RUE DE L'AUBEPINE Lipsheim", + "29 RUE DE L'AUBEPINE Strasbourg", + ] # check additional attributes assert [x['numero'] for x in resp.json['data']] == ['33', '19', '29'] @@ -271,14 +243,16 @@ def test_query_q_using_q(mocked_get, app, query): assert len(resp.json['data']) == 3 # check order is kept assert [x['id'] for x in resp.json['data']] == [ - 'e00cf6161e52a4c8fe510b2b74d4952036cb3473', - '7cafcd5c692773e8b863587b2d38d6be82e023d8', - '0984a5e1745701f71c91af73ce764e1f7132e0ff'] + 'e00cf6161e52a4c8fe510b2b74d4952036cb3473', + '7cafcd5c692773e8b863587b2d38d6be82e023d8', + '0984a5e1745701f71c91af73ce764e1f7132e0ff', + ] # check text results assert [x['text'] for x in resp.json['data']] == [ - "33 RUE DE L'AUBEPINE Strasbourg", - "19 RUE DE L'AUBEPINE Lipsheim", - "29 RUE DE L'AUBEPINE Strasbourg"] + "33 RUE DE L'AUBEPINE Strasbourg", + "19 RUE DE L'AUBEPINE Lipsheim", + "29 RUE DE L'AUBEPINE Strasbourg", + 
] # check additional attributes assert [x['numero'] for x in resp.json['data']] == ['33', '19', '29'] diff --git a/tests/test_opengis.py b/tests/test_opengis.py index 45155263..d4e4ad57 100644 --- a/tests/test_opengis.py +++ b/tests/test_opengis.py @@ -264,13 +264,7 @@ FAKE_GEOLOCATED_FEATURE_CIRCLE = { 'in-circle': False, 'in-bbox': False, }, - 'geometry': { - 'type': 'Point', - 'coordinates': [ - 2.3555374145507812, - 48.906705448392216 - ] - } + 'geometry': {'type': 'Point', 'coordinates': [2.3555374145507812, 48.906705448392216]}, }, { 'type': 'Feature', @@ -278,13 +272,7 @@ FAKE_GEOLOCATED_FEATURE_CIRCLE = { 'in-circle': True, 'in-bbox': True, }, - 'geometry': { - 'type': 'Point', - 'coordinates': [ - 2.3366546630859375, - 48.86990906900767 - ] - } + 'geometry': {'type': 'Point', 'coordinates': [2.3366546630859375, 48.86990906900767]}, }, { 'type': 'Feature', @@ -292,13 +280,7 @@ FAKE_GEOLOCATED_FEATURE_CIRCLE = { 'in-circle': True, 'in-bbox': True, }, - 'geometry': { - 'type': 'Point', - 'coordinates': [ - 2.344207763671875, - 48.82901755447848 - ] - } + 'geometry': {'type': 'Point', 'coordinates': [2.344207763671875, 48.82901755447848]}, }, { 'type': 'Feature', @@ -306,24 +288,19 @@ FAKE_GEOLOCATED_FEATURE_CIRCLE = { 'in-circle': False, 'in-bbox': True, }, - 'geometry': { - 'type': 'Point', - 'coordinates': [ - 2.304, - 48.8086 - ] - } - } - ] + 'geometry': {'type': 'Point', 'coordinates': [2.304, 48.8086]}, + }, + ], } @pytest.fixture def connector(): - return utils.setup_access_rights(OpenGIS.objects.create( - slug='test', - wms_service_url='http://example.net/wms', - wfs_service_url='http://example.net/wfs')) + return utils.setup_access_rights( + OpenGIS.objects.create( + slug='test', wms_service_url='http://example.net/wms', wfs_service_url='http://example.net/wfs' + ) + ) @pytest.fixture @@ -334,8 +311,10 @@ def query(connector): slug='test_query', description='Test query.', typename='pvo_patrimoine_voirie.pvoparking', - filter_expression=('typeparking' 
- '') + filter_expression=( + 'typeparking' + '' + ), ) @@ -345,12 +324,14 @@ def geoserver_responses(url, **kwargs): return utils.FakedResponse(status_code=200, content=FAKE_SERVICE_CAPABILITIES) return utils.FakedResponse(status_code=200, content=FAKE_FEATURES_JSON) + def geoserver_responses_v1_0_0(url, **kwargs): if kwargs['params'].get('request') == 'GetCapabilities': assert kwargs['params'].get('service') return utils.FakedResponse(status_code=200, content=FAKE_SERVICE_CAPABILITIES_V1_0_0) return utils.FakedResponse(status_code=200, content=FAKE_FEATURES_JSON) + def geoserver_responses_errors(url, **kwargs): if kwargs['params'].get('request') == 'GetCapabilities': return utils.FakedResponse(status_code=200, content=FAKE_SERVICE_CAPABILITIES) @@ -381,13 +362,17 @@ def test_feature_info(mocked_get, app, connector): assert endpoint == '/opengis/test/feature_info' mocked_get.return_value = utils.FakedResponse(content=FAKE_FEATURE_INFO, status_code=200) resp = app.get(endpoint, params={'lat': '45.796890', 'lon': '4.784140'}) - assert mocked_get.call_args[1]['params']['bbox'] == '532556.896735,5747844.261214,532579.160633,5747876.194333' + assert ( + mocked_get.call_args[1]['params']['bbox'] + == '532556.896735,5747844.261214,532579.160633,5747876.194333' + ) assert mocked_get.call_args[1]['params']['crs'] == 'EPSG:3857' - assert (resp.json['data'] - ['cad_cadastrecadparcelle_layer'] - ['cad_cadastrecadparcelle_feature'] - ['natureproprietaire'] - == 'Particulier') + assert ( + resp.json['data']['cad_cadastrecadparcelle_layer']['cad_cadastrecadparcelle_feature'][ + 'natureproprietaire' + ] + == 'Particulier' + ) connector.projection = 'EPSG:4326' connector.save() resp = app.get(endpoint, params={'lat': '45.796890', 'lon': '4.784140'}) @@ -396,10 +381,13 @@ def test_feature_info(mocked_get, app, connector): @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('lat,lon', [ - ('bad-value', '4.784140'), - ('45.796890', 'bad-value'), -]) 
+@pytest.mark.parametrize( + 'lat,lon', + [ + ('bad-value', '4.784140'), + ('45.796890', 'bad-value'), + ], +) def test_feature_info_bad_request(mocked_get, app, connector, lat, lon): endpoint = utils.generic_endpoint_url('opengis', 'feature_info', slug=connector.slug) assert endpoint == '/opengis/test/feature_info' @@ -416,7 +404,10 @@ def test_tile(mocked_get, app, connector): mocked_get.return_value = utils.FakedResponse(content=b'\x89PNG\r\n\x1a\n\x00\x00...', status_code=200) resp = app.get(endpoint + '/16/33650/23378.png') assert mocked_get.call_args[1]['params']['crs'] == 'EPSG:3857' - assert mocked_get.call_args[1]['params']['bbox'] == '539339.671580,5741338.068556,539951.167806,5741949.564782' + assert ( + mocked_get.call_args[1]['params']['bbox'] + == '539339.671580,5741338.068556,539951.167806,5741949.564782' + ) connector.projection = 'EPSG:4326' connector.save() resp = app.get(endpoint + '/16/33650/23378.png') @@ -459,12 +450,14 @@ def test_get_feature(mocked_get, app, connector): def test_get_filtered_feature(mocked_get, app, connector): endpoint = utils.generic_endpoint_url('opengis', 'features', slug=connector.slug) mocked_get.side_effect = geoserver_responses - app.get(endpoint, - params={ - 'type_names': 'ref_metro_limites_communales', - 'property_name': 'nom', - 'cql_filter': 'nom=\'Fontaine\'' - }) + app.get( + endpoint, + params={ + 'type_names': 'ref_metro_limites_communales', + 'property_name': 'nom', + 'cql_filter': 'nom=\'Fontaine\'', + }, + ) assert mocked_get.call_args[1]['params']['cql_filter'] == 'nom=\'Fontaine\'' @@ -472,9 +465,12 @@ def test_get_filtered_feature(mocked_get, app, connector): def test_get_filtered_by_property_feature(mocked_get, app, connector): endpoint = utils.generic_endpoint_url('opengis', 'features', slug=connector.slug) mocked_get.side_effect = geoserver_responses - params = {'type_names': 'ref_metro_limites_communales', - 'property_name': 'nom', 'cql_filter': 'nom=\'Fontaine\'', - 'filter_property_name': 'nom'} 
+ params = { + 'type_names': 'ref_metro_limites_communales', + 'property_name': 'nom', + 'cql_filter': 'nom=\'Fontaine\'', + 'filter_property_name': 'nom', + } app.get(endpoint, params=params) assert mocked_get.call_args[1]['params']['cql_filter'] == 'nom=\'Fontaine\'' params['q'] = 'bens' @@ -496,10 +492,7 @@ def test_get_feature_error(mocked_get, app, connector): endpoint = utils.generic_endpoint_url('opengis', 'features', slug=connector.slug) assert endpoint == '/opengis/test/features' mocked_get.side_effect = geoserver_responses_errors - resp = app.get(endpoint, params={ - 'type_names': 'ref_metro_limites_communales', - 'property_name': 'nom' - }) + resp = app.get(endpoint, params={'type_names': 'ref_metro_limites_communales', 'property_name': 'nom'}) assert mocked_get.call_args[1]['params']['request'] == 'GetFeature' assert mocked_get.call_args[1]['params']['propertyName'] == 'nom' assert mocked_get.call_args[1]['params']['typenames'] == 'ref_metro_limites_communales' @@ -517,10 +510,7 @@ def test_get_feature_error2(mocked_get, app, connector): endpoint = utils.generic_endpoint_url('opengis', 'features', slug=connector.slug) assert endpoint == '/opengis/test/features' mocked_get.side_effect = geoserver_responses_errors_unparsable - resp = app.get(endpoint, params={ - 'type_names': 'ref_metro_limites_communales', - 'property_name': 'nom' - }) + resp = app.get(endpoint, params={'type_names': 'ref_metro_limites_communales', 'property_name': 'nom'}) assert mocked_get.call_args[1]['params']['request'] == 'GetFeature' assert mocked_get.call_args[1]['params']['propertyName'] == 'nom' assert mocked_get.call_args[1]['params']['typenames'] == 'ref_metro_limites_communales' @@ -533,9 +523,10 @@ def test_get_feature_error2(mocked_get, app, connector): assert ' 502)', - 'GET http://tcl.example.net/tclpassagearret (=> 502)'), - ('GET https://tcl.example.net/tclpassagearret (=> 502)', - 'GET https://tcl.example.net/tclpassagearret (=> 502)'), - ('GET 
http://username:password@tcl.example.net/tclpassagearret (=> 502)', - 'GET http://***:***@tcl.example.net/tclpassagearret (=> 502)'), - ('GET https://username:password@tcl.example.net/tclpassagearret (=> 502)', - 'GET https://***:***@tcl.example.net/tclpassagearret (=> 502)'), -]) -def test_proxy_logger_hide_url_credentials(db, settings, email_handler, mailoutbox, - connector, error_msg, expected): +@pytest.mark.parametrize( + 'error_msg, expected', + [ + ('Foo Bar', 'Foo Bar'), + ('http://badurl', 'http://badurl'), + ( + 'GET http://tcl.example.net/tclpassagearret (=> 502)', + 'GET http://tcl.example.net/tclpassagearret (=> 502)', + ), + ( + 'GET https://tcl.example.net/tclpassagearret (=> 502)', + 'GET https://tcl.example.net/tclpassagearret (=> 502)', + ), + ( + 'GET http://username:password@tcl.example.net/tclpassagearret (=> 502)', + 'GET http://***:***@tcl.example.net/tclpassagearret (=> 502)', + ), + ( + 'GET https://username:password@tcl.example.net/tclpassagearret (=> 502)', + 'GET https://***:***@tcl.example.net/tclpassagearret (=> 502)', + ), + ], +) +def test_proxy_logger_hide_url_credentials( + db, settings, email_handler, mailoutbox, connector, error_msg, expected +): settings.ADMINS = [('admin', 'admin@example.net')] pr = ProxyLogger(connector) @@ -136,7 +148,14 @@ def test_validate_notification_delays(db, connector): availability_parameters.notification_delays = '5,100,1000' availability_parameters.full_clean() assert not availability_parameters.has_zero_delay() - assert take(availability_parameters.notification_delays_generator(), 6) == [5, 100, 1000, 2000, 3000, 4000] + assert take(availability_parameters.notification_delays_generator(), 6) == [ + 5, + 100, + 1000, + 2000, + 3000, + 4000, + ] availability_parameters.notification_delays = '5' availability_parameters.full_clean() @@ -185,8 +204,7 @@ def test_log_on_connector_availability_change(db, connector, freezer, notificati assert last_count1 == 2 assert ResourceLog.objects.all()[0].message == 
'GET http://example.net/ (=> 404)' assert ResourceLog.objects.all()[1].level == 'error' if notification_delays.startswith('0') else 'warning' - assert (u'connector "éléphant" (Feed) is now down: 404 Client' - in ResourceLog.objects.all()[1].message) + assert u'connector "éléphant" (Feed) is now down: 404 Client' in ResourceLog.objects.all()[1].message # move 5 minutes in the future freezer.move_to(datetime.timedelta(seconds=60 * 5 + 1)) @@ -201,8 +219,10 @@ def test_log_on_connector_availability_change(db, connector, freezer, notificati else: assert last_count2 == last_count1 + 1 assert ResourceLog.objects.all()[2].level == 'error' if notification_delays != '0' else 'warning' - assert (u'connector "éléphant" (Feed) has been down for 5 minutes: 404' - in ResourceLog.objects.all()[2].message) + assert ( + u'connector "éléphant" (Feed) has been down for 5 minutes: 404' + in ResourceLog.objects.all()[2].message + ) # move 3 minutes in the future freezer.move_to(datetime.timedelta(seconds=60 * 3 + 1)) @@ -268,9 +288,7 @@ def test_logged_requests_and_responses_max_size(app, db, monkeypatch, settings): @endpoint(methods=['post']) def httpcall(self, request): - connector_payload = { - 'connector_query_var': '2' * 20 - } + connector_payload = {'connector_query_var': '2' * 20} self.requests.post(URL, connector_payload) raise APIError({'connector_error_var': '4' * 20}) @@ -290,8 +308,7 @@ def test_logged_requests_and_responses_max_size(app, db, monkeypatch, settings): assert len(ResourceLog.objects.all()) == 3 # - user POST query - assert ResourceLog.objects.all()[0].extra['connector_payload'] == \ - 'user_query_var=11111111111111111111' + assert ResourceLog.objects.all()[0].extra['connector_payload'] == 'user_query_var=11111111111111111111' # - connector POST queries assert not ResourceLog.objects.all()[1].extra.get('request_payload') @@ -300,13 +317,12 @@ def test_logged_requests_and_responses_max_size(app, db, monkeypatch, settings): # - connector error if PY2: - assert 
ResourceLog.objects.all()[2].extra['body'] == \ - "'user_query_var=11111111111111111111'" + assert ResourceLog.objects.all()[2].extra['body'] == "'user_query_var=11111111111111111111'" else: - assert ResourceLog.objects.all()[2].extra['body'] == \ - "b'user_query_var=11111111111111111111'" - assert ResourceLog.objects.all()[2].extra['exception'] == \ - "{'connector_error_var': '44444444444444444444'}" + assert ResourceLog.objects.all()[2].extra['body'] == "b'user_query_var=11111111111111111111'" + assert ( + ResourceLog.objects.all()[2].extra['exception'] == "{'connector_error_var': '44444444444444444444'}" + ) # log connector payload and service response connector.set_log_level('DEBUG') @@ -317,16 +333,20 @@ def test_logged_requests_and_responses_max_size(app, db, monkeypatch, settings): assert len(ResourceLog.objects.all()) == 3 # - connector POST queries - assert ResourceLog.objects.all()[1].extra['request_payload'] == \ - "'connector_query_var=22222222222222222222'" - assert ResourceLog.objects.all()[1].extra.get('response_headers') == \ - {'Content-Type': 'foo/bar'} + assert ( + ResourceLog.objects.all()[1].extra['request_payload'] == "'connector_query_var=22222222222222222222'" + ) + assert ResourceLog.objects.all()[1].extra.get('response_headers') == {'Content-Type': 'foo/bar'} if PY2: - assert ResourceLog.objects.all()[1].extra.get('response_content') == \ - '\'{"service_reply_var": "33333333333333333333"}\'' + assert ( + ResourceLog.objects.all()[1].extra.get('response_content') + == '\'{"service_reply_var": "33333333333333333333"}\'' + ) else: - assert ResourceLog.objects.all()[1].extra.get('response_content') == \ - 'b\'{"service_reply_var": "33333333333333333333"}\'' + assert ( + ResourceLog.objects.all()[1].extra.get('response_content') + == 'b\'{"service_reply_var": "33333333333333333333"}\'' + ) # log troncated payloads parameters = connector.logging_parameters @@ -338,20 +358,16 @@ def test_logged_requests_and_responses_max_size(app, db, 
monkeypatch, settings): assert len(ResourceLog.objects.all()) == 3 # - user POST query - assert ResourceLog.objects.all()[0].extra['connector_payload'] == \ - 'user_query_var=1111111111' + assert ResourceLog.objects.all()[0].extra['connector_payload'] == 'user_query_var=1111111111' # - connector POST queries - assert ResourceLog.objects.all()[1].extra['request_payload'] == \ - "'connector_query_var=22222'" + assert ResourceLog.objects.all()[1].extra['request_payload'] == "'connector_query_var=22222'" # - connector error if PY2: - assert ResourceLog.objects.all()[2].extra['body'] == \ - "'user_query_var=1111111111'" + assert ResourceLog.objects.all()[2].extra['body'] == "'user_query_var=1111111111'" else: - assert ResourceLog.objects.all()[2].extra['body'] == \ - "b'user_query_var=1111111111'" + assert ResourceLog.objects.all()[2].extra['body'] == "b'user_query_var=1111111111'" # log troncated service response parameters = connector.logging_parameters @@ -364,8 +380,6 @@ def test_logged_requests_and_responses_max_size(app, db, monkeypatch, settings): # - connector POST queries if PY2: - assert ResourceLog.objects.all()[1].extra.get('response_content') == \ - '\'{"service_reply_var": "33\'' + assert ResourceLog.objects.all()[1].extra.get('response_content') == '\'{"service_reply_var": "33\'' else: - assert ResourceLog.objects.all()[1].extra.get('response_content') == \ - 'b\'{"service_reply_var": "33\'' + assert ResourceLog.objects.all()[1].extra.get('response_content') == 'b\'{"service_reply_var": "33\'' diff --git a/tests/test_requests.py b/tests/test_requests.py index 88058de8..e2a96770 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -15,7 +15,6 @@ import utils from utils import FakedResponse - class MockFileField(object): def __init__(self, path): self.path = path @@ -35,15 +34,20 @@ class MockResource(object): def log_level(request): return request.param + @urlmatch(netloc=r'(.*\.)?httpbin\.org$') def httpbin_mock(url, request): - return 
response(200, {"message": "Are you really josh ?"}, - headers={"Content-Type": "application/json"}, request=request) + return response( + 200, + {"message": "Are you really josh ?"}, + headers={"Content-Type": "application/json"}, + request=request, + ) + @urlmatch(netloc=r'(.*\.)?httperror\.org$') def http400_mock(url, request): - return response(400, {"foo": "bar"}, - headers={"Content-Type": "application/json"}, request=request) + return response(400, {"foo": "bar"}, headers={"Content-Type": "application/json"}, request=request) def test_log_level(caplog, log_level): @@ -54,7 +58,7 @@ def test_log_level(caplog, log_level): with HTTMock(httpbin_mock): requests = Request(logger=logger) - response = requests.post(url, json={'name':'josh'}) + response = requests.post(url, json={'name': 'josh'}) records = [record for record in caplog.records if record.name == 'requests'] @@ -77,6 +81,7 @@ def test_log_level(caplog, log_level): assert not hasattr(record, 'response_content') assert not hasattr(record, 'response_headers') + def test_log_error(caplog, log_level): url = 'https://httperror.org/plop' @@ -85,7 +90,7 @@ def test_log_error(caplog, log_level): with HTTMock(http400_mock): requests = Request(logger=logger) - response = requests.post(url, json={'name':'josh'}) + response = requests.post(url, json={'name': 'josh'}) records = [record for record in caplog.records if record.name == 'requests'] @@ -108,6 +113,7 @@ def test_log_error(caplog, log_level): assert not hasattr(record, 'response_content') assert not hasattr(record, 'response_headers') + def test_log_error_http_max_sizes(caplog, log_level, settings): url = 'https://httperror.org/plop' @@ -120,7 +126,7 @@ def test_log_error_http_max_sizes(caplog, log_level, settings): settings.LOGGED_RESPONSES_MAX_SIZE = 7 with HTTMock(http400_mock): requests = Request(logger=logger) - response = requests.post(url, json={'name':'josh'}) + response = requests.post(url, json={'name': 'josh'}) if logger.level == 10: # DEBUG 
records = [record for record in caplog.records if record.name == 'requests'] @@ -131,22 +137,35 @@ def test_log_error_http_max_sizes(caplog, log_level, settings): @pytest.fixture(params=['xml', 'whatever', 'jpeg', 'pdf']) def endpoint_response(request): response_request = mock.Mock( - headers={'Accept': '*/*', 'Authorization': 'Basic dG9rZW46dG9rZW4='}, body=None) + headers={'Accept': '*/*', 'Authorization': 'Basic dG9rZW46dG9rZW4='}, body=None + ) xml = FakedResponse( - headers={'Content-Type': 'application/xml; charset=charset=utf-8'}, status_code=200, - content='xml test', request=response_request) + headers={'Content-Type': 'application/xml; charset=charset=utf-8'}, + status_code=200, + content='xml test', + request=response_request, + ) whatever = FakedResponse( - headers={'Content-Type': 'texto/csv'}, status_code=200, - content='username;age\ntoken;10\ncartman:10', request=response_request) + headers={'Content-Type': 'texto/csv'}, + status_code=200, + content='username;age\ntoken;10\ncartman:10', + request=response_request, + ) jpeg = FakedResponse( - headers={'Content-Type': 'image/jpeg'}, status_code=200, - content='binary content to be ignored', request=response_request) + headers={'Content-Type': 'image/jpeg'}, + status_code=200, + content='binary content to be ignored', + request=response_request, + ) pdf = FakedResponse( - headers={'Content-Type': 'application/pdf'}, status_code=200, - content='binary content to be ignored', request=response_request) + headers={'Content-Type': 'application/pdf'}, + status_code=200, + content='binary content to be ignored', + request=response_request, + ) return locals().get(request.param) @@ -173,15 +192,13 @@ def test_proxies(mocked_get, caplog, endpoint_response): logger = logging.getLogger('requests') Request(logger=logger).get('http://example.net/whatever') assert mocked_get.call_args[1].get('proxies') is None - Request(logger=logger).get('http://example.net/whatever', - proxies={'http': 'http://proxy'}) + 
Request(logger=logger).get('http://example.net/whatever', proxies={'http': 'http://proxy'}) assert mocked_get.call_args[1].get('proxies') == {'http': 'http://proxy'} with override_settings(REQUESTS_PROXIES={'http': 'http://globalproxy'}): Request(logger=logger).get('http://example.net/whatever') assert mocked_get.call_args[1].get('proxies') == {'http': 'http://globalproxy'} - Request(logger=logger).get('http://example.net/whatever', - proxies={'http': 'http://proxy'}) + Request(logger=logger).get('http://example.net/whatever', proxies={'http': 'http://proxy'}) assert mocked_get.call_args[1].get('proxies') == {'http': 'http://proxy'} # with a linked resource @@ -234,6 +251,7 @@ def test_resource_auth(mocked_get, caplog, endpoint_response): request.get('http://example.net/whatever', auth=None) assert mocked_get.call_args[1].get('auth') is None + @mock.patch('passerelle.utils.RequestSession.send') def test_resource_hawk_auth(mocked_send, caplog, endpoint_response): mocked_send.return_value = endpoint_response @@ -249,9 +267,15 @@ def test_resource_hawk_auth(mocked_send, caplog, endpoint_response): assert 'Authorization' in prepared_method.headers generated_header = prepared_method.headers['Authorization'] - sender = mohawk.Sender(credentials, nonce=hawk_auth.nonce, _timestamp=hawk_auth.timestamp, - url='http://httpbin.org/get', method='GET', content_type='', - content='') + sender = mohawk.Sender( + credentials, + nonce=hawk_auth.nonce, + _timestamp=hawk_auth.timestamp, + url='http://httpbin.org/get', + method='GET', + content_type='', + content='', + ) expected_header = sender.request_header generated_parts = [tuple(e.strip().split('=', 1)) for e in generated_header[5:].split(',')] expected_parts = [tuple(e.strip().split('=', 1)) for e in expected_header[5:].split(',')] @@ -264,9 +288,16 @@ def test_resource_hawk_auth(mocked_send, caplog, endpoint_response): assert 'Authorization' in prepared_method.headers generated_header = prepared_method.headers['Authorization'] 
- sender = mohawk.Sender(credentials, nonce=hawk_auth.nonce, _timestamp=hawk_auth.timestamp, - url='http://httpbin.org/post', method='POST', content_type='application/json', - content='{"key": "value"}', ext="extra attribute") + sender = mohawk.Sender( + credentials, + nonce=hawk_auth.nonce, + _timestamp=hawk_auth.timestamp, + url='http://httpbin.org/post', + method='POST', + content_type='application/json', + content='{"key": "value"}', + ext="extra attribute", + ) expected_header = sender.request_header generated_parts = [tuple(e.strip().split('=', 1)) for e in generated_header[5:].split(',')] @@ -313,9 +344,11 @@ def test_requests_cache(mocked_get, caplog): response_request = mock.Mock(headers={'Accept': '*/*'}, body=None) mocked_get.return_value = FakedResponse( - headers={'Content-Type': 'text/plain; charset=charset=utf-8'}, - request=response_request, - content=b'hello world', status_code=200) + headers={'Content-Type': 'text/plain; charset=charset=utf-8'}, + request=response_request, + content=b'hello world', + status_code=200, + ) # by default there is no cache assert request.get('http://cache.example.org/').content == b'hello world' @@ -327,13 +360,15 @@ def test_requests_cache(mocked_get, caplog): assert request.get('http://cache.example.org/', cache_duration=15).content == b'hello world' assert mocked_get.call_count == 1 assert request.get('http://cache.example.org/', cache_duration=15).content == b'hello world' - assert mocked_get.call_count == 1 # got a cached response + assert mocked_get.call_count == 1 # got a cached response # value changed mocked_get.return_value = FakedResponse( - headers={'Content-Type': 'text/plain; charset=charset=utf-8'}, - request=response_request, - content=b'hello second world', status_code=200) + headers={'Content-Type': 'text/plain; charset=charset=utf-8'}, + request=response_request, + content=b'hello second world', + status_code=200, + ) assert request.get('http://cache.example.org/', cache_duration=15).content == 
b'hello world' assert mocked_get.call_count == 1 @@ -343,9 +378,11 @@ def test_requests_cache(mocked_get, caplog): # do not cache errors mocked_get.return_value = FakedResponse( - headers={'Content-Type': 'text/plain; charset=charset=utf-8'}, - request=response_request, - content=b'no such world', status_code=404) + headers={'Content-Type': 'text/plain; charset=charset=utf-8'}, + request=response_request, + content=b'no such world', + status_code=404, + ) mocked_get.reset_mock() response = request.get('http://cache.example.org/404', cache_duration=15) assert response.content == b'no such world' @@ -357,13 +394,21 @@ def test_requests_cache(mocked_get, caplog): # check response headers mocked_get.reset_mock() mocked_get.return_value = FakedResponse( - headers=CaseInsensitiveDict({'Content-Type': 'image/png'}), - request=response_request, - content=b'hello world', status_code=200) - assert request.get('http://cache.example.org/img', cache_duration=15).headers.get('content-type') == 'image/png' + headers=CaseInsensitiveDict({'Content-Type': 'image/png'}), + request=response_request, + content=b'hello world', + status_code=200, + ) + assert ( + request.get('http://cache.example.org/img', cache_duration=15).headers.get('content-type') + == 'image/png' + ) assert mocked_get.call_count == 1 - assert request.get('http://cache.example.org/img', cache_duration=15).headers.get('content-type') == 'image/png' - assert mocked_get.call_count == 1 # got a cached response + assert ( + request.get('http://cache.example.org/img', cache_duration=15).headers.get('content-type') + == 'image/png' + ) + assert mocked_get.call_count == 1 # got a cached response @mock.patch('passerelle.utils.RequestSession.request') @@ -391,16 +436,19 @@ def test_timeout(mocked_get, caplog, endpoint_response): def test_log_http_request(caplog): @urlmatch() def bad_headers(url, request): - return response(200, 'coin', - headers={'Error Webservice': b'\xe9'}, - request=request) + return response(200, 'coin', 
headers={'Error Webservice': b'\xe9'}, request=request) + with HTTMock(bad_headers): resp = requests.get('https://example.com/') caplog.set_level(logging.DEBUG) assert len(caplog.records) == 0 log_http_request(logging.getLogger(), resp.request, resp) assert len(caplog.records) == 1 - extra = {key: value for key, value in caplog.records[0].__dict__.items() if key.startswith(('request_', 'response_'))} + extra = { + key: value + for key, value in caplog.records[0].__dict__.items() + if key.startswith(('request_', 'response_')) + } del extra['request_headers']['User-Agent'] assert extra == { 'request_headers': { @@ -409,8 +457,6 @@ def test_log_http_request(caplog): u'Connection': u'keep-alive', }, 'request_url': 'https://example.com/', - 'response_headers': { - u'Error Webservice': u'\ufffd' - }, - 'response_status': 200 + 'response_headers': {u'Error Webservice': u'\ufffd'}, + 'response_status': 200, } diff --git a/tests/test_rsa13.py b/tests/test_rsa13.py index 5af4a049..b8cbf423 100644 --- a/tests/test_rsa13.py +++ b/tests/test_rsa13.py @@ -331,7 +331,9 @@ def test_platform_beneficiaire_detail(app, rsa13, url): BENEFICIAIRE_TRANSPORT = {'cumuls': [{'duree': 54, 'type': 'GTU'}]} -@mock_response(['/api/platform/11/beneficiaire/386981/transport/', {'err': 0, 'data': BENEFICIAIRE_TRANSPORT}]) +@mock_response( + ['/api/platform/11/beneficiaire/386981/transport/', {'err': 0, 'data': BENEFICIAIRE_TRANSPORT}] +) def test_platform_beneficiaire_transport(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/386981/transport/') assert response.json == {'err': 0, 'data': BENEFICIAIRE_TRANSPORT} @@ -407,7 +409,9 @@ BENEFICIAIRE_CONTRAT_DETAIL = { } -@mock_response(['/api/platform/11/beneficiaire/386981/contrat/15/', {'err': 0, 'data': BENEFICIAIRE_CONTRAT_DETAIL}]) +@mock_response( + ['/api/platform/11/beneficiaire/386981/contrat/15/', {'err': 0, 'data': BENEFICIAIRE_CONTRAT_DETAIL}] +) def test_platform_beneficiaire_contrat_detail(app, rsa13, url): response = 
app.get(url + 'platform/11/beneficiaire/386981/contrat/15/') assert response.json == { @@ -466,7 +470,9 @@ BENEFICIAIRE_ACTION_DETAIL = { } -@mock_response(['/api/platform/11/beneficiaire/386981/action/663774/', {'err': 0, 'data': BENEFICIAIRE_ACTION_DETAIL}]) +@mock_response( + ['/api/platform/11/beneficiaire/386981/action/663774/', {'err': 0, 'data': BENEFICIAIRE_ACTION_DETAIL}] +) def test_platform_beneficiaire_action_detail(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/386981/action/663774/') assert response.json == { @@ -488,10 +494,12 @@ BENEFICIAIRE_FONDSAIDE = [ ] -@mock_response(['/api/platform/11/beneficiaire/386981/fondsaide/', {'err': 0, 'data': BENEFICIAIRE_FONDSAIDE}]) +@mock_response( + ['/api/platform/11/beneficiaire/386981/fondsaide/', {'err': 0, 'data': BENEFICIAIRE_FONDSAIDE}] +) def test_platform_beneficiaire_fondsaide(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/386981/fondsaide/') -# 365385/fonds-d-aide/fond-d-aide/39605/ + # 365385/fonds-d-aide/fond-d-aide/39605/ assert response.json == { 'err': 0, 'data': BENEFICIAIRE_FONDSAIDE, @@ -509,9 +517,7 @@ BENEFICIAIRE_FONDSAIDE_DETAIL = { 'date_relance': None, 'num_versement': 1, 'reception': 'Non', - 'type': 'Document officiel de ' - 'présentation au code de la ' - 'route (ETG)', + 'type': 'Document officiel de ' 'présentation au code de la ' 'route (ETG)', }, { 'conforme': None, @@ -550,7 +556,9 @@ BENEFICIAIRE_FONDSAIDE_DETAIL = { } -@mock_response(['/api/platform/11/beneficiaire/386981/fondsaide/1/', {'err': 0, 'data': BENEFICIAIRE_FONDSAIDE_DETAIL}]) +@mock_response( + ['/api/platform/11/beneficiaire/386981/fondsaide/1/', {'err': 0, 'data': BENEFICIAIRE_FONDSAIDE_DETAIL}] +) def test_platform_beneficiaire_fondsaide_detail(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/386981/fondsaide/1/') assert response.json == { @@ -581,7 +589,9 @@ BENEFICIAIRE_AFFECTATION = [ ] 
-@mock_response(['/api/platform/11/beneficiaire/386981/affectation/', {'err': 0, 'data': BENEFICIAIRE_AFFECTATION}]) +@mock_response( + ['/api/platform/11/beneficiaire/386981/affectation/', {'err': 0, 'data': BENEFICIAIRE_AFFECTATION}] +) def test_platform_beneficiaire_affectation(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/386981/affectation/') assert response.json == { @@ -613,7 +623,10 @@ BENEFICIAIRE_AFFECTATION_DETAIL = { @mock_response( - ['/api/platform/11/beneficiaire/386981/affectation/1/', {'err': 0, 'data': BENEFICIAIRE_AFFECTATION_DETAIL}] + [ + '/api/platform/11/beneficiaire/386981/affectation/1/', + {'err': 0, 'data': BENEFICIAIRE_AFFECTATION_DETAIL}, + ] ) def test_platform_beneficiaire_affectation_detail(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/386981/affectation/1/') diff --git a/tests/test_sigerly.py b/tests/test_sigerly.py index 2e2e362e..f36d0acf 100644 --- a/tests/test_sigerly.py +++ b/tests/test_sigerly.py @@ -141,7 +141,11 @@ def test_query_id(app, connector): def test_query_filters(app, connector): endpoint = get_endpoint('query') - payload = {'date_debut_demande': '19/11/2020', 'date_fin_demande': '19/11/2020', 'insee': '::069291:::069283::'} + payload = { + 'date_debut_demande': '19/11/2020', + 'date_fin_demande': '19/11/2020', + 'insee': '::069291:::069283::', + } @httmock.urlmatch(netloc='dummy-server.org', path='/getIntervention.php', method='POST') def sigerly_mock(url, request): diff --git a/tests/test_signature.py b/tests/test_signature.py index 319d41f1..157e6bd0 100644 --- a/tests/test_signature.py +++ b/tests/test_signature.py @@ -38,8 +38,9 @@ def test_signature(): # Test timedelta parameter now = datetime.datetime.utcnow() - assert '×tamp=%s' % urllib.quote(now.strftime('%Y-%m-%dT%H:%M:%SZ')) in \ - signature.sign_url(URL, KEY, timestamp=now) + assert '×tamp=%s' % urllib.quote(now.strftime('%Y-%m-%dT%H:%M:%SZ')) in signature.sign_url( + URL, KEY, timestamp=now + ) # Test nonce 
parameter assert '&nonce=uuu&' in signature.sign_url(URL, KEY, nonce='uuu') @@ -49,21 +50,22 @@ def test_signature(): # Test known_nonce def known_nonce(nonce): return nonce == 'xxx' + assert signature.check_url(signature.sign_url(URL, KEY), KEY, known_nonce=known_nonce) assert signature.check_url(signature.sign_url(URL, KEY, nonce='zzz'), KEY, known_nonce=known_nonce) assert not signature.check_url(signature.sign_url(URL, KEY, nonce='xxx'), KEY, known_nonce=known_nonce) assert not signature.check_url(signature.sign_url(URL, KEY, nonce=''), KEY, known_nonce=known_nonce) # Test timedelta - now = (datetime.datetime.utcnow() - datetime.timedelta(seconds=20)) + now = datetime.datetime.utcnow() - datetime.timedelta(seconds=20) assert signature.check_url(signature.sign_url(URL, KEY, timestamp=now), KEY) - now = (datetime.datetime.utcnow() + datetime.timedelta(seconds=20)) + now = datetime.datetime.utcnow() + datetime.timedelta(seconds=20) assert signature.check_url(signature.sign_url(URL, KEY, timestamp=now), KEY) # too late - now = (datetime.datetime.utcnow() - datetime.timedelta(seconds=40)) + now = datetime.datetime.utcnow() - datetime.timedelta(seconds=40) assert not signature.check_url(signature.sign_url(URL, KEY, timestamp=now), KEY) - now = (datetime.datetime.utcnow() - datetime.timedelta(seconds=20)) + now = datetime.datetime.utcnow() - datetime.timedelta(seconds=20) assert not signature.check_url(signature.sign_url(URL, KEY, timestamp=now), KEY, timedelta=10) # too early - now = (datetime.datetime.utcnow() + datetime.timedelta(seconds=40)) + now = datetime.datetime.utcnow() + datetime.timedelta(seconds=40) assert not signature.check_url(signature.sign_url(URL, KEY, timestamp=now), KEY) diff --git a/tests/test_sms.py b/tests/test_sms.py index 03f80648..1e937675 100644 --- a/tests/test_sms.py +++ b/tests/test_sms.py @@ -41,19 +41,20 @@ def test_clean_numbers(): def connector(request, db): klass = request.param kwargs = getattr(klass, 'TEST_DEFAULTS', 
{}).get('create_kwargs', {}) - kwargs.update({ - 'title': klass.__name__, - 'slug': klass.__name__.lower(), - 'description': klass.__name__, - }) + kwargs.update( + { + 'title': klass.__name__, + 'slug': klass.__name__.lower(), + 'description': klass.__name__, + } + ) c = klass.objects.create(**kwargs) api = ApiUser.objects.create(username='apiuser', fullname='Api User', description='api') obj_type = ContentType.objects.get_for_model(c) # no access check - AccessRight.objects.create(codename='can_send_messages', - apiuser=api, - resource_type=obj_type, - resource_pk=c.pk) + AccessRight.objects.create( + codename='can_send_messages', apiuser=api, resource_type=obj_type, resource_pk=c.pk + ) return c @@ -83,9 +84,8 @@ def test_connectors(app, connector, freezer): # perform job freezer.move_to('2019-01-01 01:00:03') with utils.mock_url( - connector.URL, - test_vector.get('response', ''), - test_vector.get('status_code', 200)): + connector.URL, test_vector.get('response', ''), test_vector.get('status_code', 200) + ): connector.jobs() job = Job.objects.get(id=job_id) if job.status == 'failed': @@ -106,8 +106,8 @@ def test_manage_views(admin_user, app, connector): app = login(app) resp = app.get(url) description_fields = [ - x.text.split(':')[0] - for x in resp.html.find('div', {'id': 'description'}).find_all('p')] + x.text.split(':')[0] for x in resp.html.find('div', {'id': 'description'}).find_all('p') + ] assert 'Default country code' in description_fields assert 'Default trunk prefix' in description_fields assert 'Maximum message length' in description_fields @@ -131,8 +131,7 @@ def test_manage_views_ovh(app, connector, admin_user): connector.save() url = '/%s/%s/' % (connector.get_connector_slug(), connector.slug) resp = app.get(url) - description_fields = [ - x.text for x in resp.html.find('div', {'id': 'description'}).find_all('p')] + description_fields = [x.text for x in resp.html.find('div', {'id': 'description'}).find_all('p')] assert any(x for x in 
description_fields if 'Default country code' in x) assert any(x for x in description_fields if '44' in x) assert not any(x for x in description_fields if 'secret' in x) @@ -184,7 +183,8 @@ def test_sms_nostop_parameter(app, connector): for path in (base_path, base_path + '?nostop=1', base_path + '?nostop=foo', base_path + '?nostop'): send_patch = mock.patch( 'passerelle.apps.%s.models.%s.send_msg' - % (connector.__class__._meta.app_label, connector.__class__.__name__)) + % (connector.__class__._meta.app_label, connector.__class__.__name__) + ) with send_patch as send_function: send_function.return_value = {} result = app.post_json(base_path, params=payload) @@ -194,12 +194,15 @@ def test_sms_nostop_parameter(app, connector): @pytest.mark.parametrize('connector', [OVHSMSGateway], indirect=True) -@pytest.mark.parametrize('to, destination', [ - ('06 12 34 56 78', '0033612345678'), - ('06.12.34.56.78', '0033612345678'), - ('06-12-34-56-78', '0033612345678'), - ('+33/612345678', '0033612345678'), -]) +@pytest.mark.parametrize( + 'to, destination', + [ + ('06 12 34 56 78', '0033612345678'), + ('06.12.34.56.78', '0033612345678'), + ('06-12-34-56-78', '0033612345678'), + ('+33/612345678', '0033612345678'), + ], +) def test_send_schema(app, connector, to, destination): base_path = '/%s/%s/send/' % (connector.get_connector_slug(), connector.slug) payload = { @@ -209,7 +212,8 @@ def test_send_schema(app, connector, to, destination): } send_patch = mock.patch( 'passerelle.apps.%s.models.%s.send_msg' - % (connector.__class__._meta.app_label, connector.__class__.__name__)) + % (connector.__class__._meta.app_label, connector.__class__.__name__) + ) with send_patch as send_function: app.post_json(base_path, params=payload) connector.jobs() @@ -218,16 +222,18 @@ def test_send_schema(app, connector, to, destination): def test_ovh_new_api(app, freezer): connector = OVHSMSGateway.objects.create( - slug='ovh', account='sms-test42', + slug='ovh', + account='sms-test42', 
application_key='RHrTdU2oTsrVC0pu', application_secret='CLjtS69tTcPgCKxedeoZlgMSoQGSiXMa', - consumer_key='iF0zi0MJrbjNcI3hvuvwkhNk8skrigxz' + consumer_key='iF0zi0MJrbjNcI3hvuvwkhNk8skrigxz', ) api = ApiUser.objects.create(username='apiuser') obj_type = ContentType.objects.get_for_model(connector) # no access check - AccessRight.objects.create(codename='can_send_messages', apiuser=api, resource_type=obj_type, - resource_pk=connector.pk) + AccessRight.objects.create( + codename='can_send_messages', apiuser=api, resource_type=obj_type, resource_pk=connector.pk + ) payload = { 'message': 'hello', @@ -248,7 +254,7 @@ def test_ovh_new_api(app, freezer): 'validReceivers': ['+33688888888', '+33677777777'], 'totalCreditsRemoved': 1, 'ids': [241615100], - 'invalidReceivers': [] + 'invalidReceivers': [], } base_url = connector.API_URL % {'serviceName': 'sms-test42'} url = base_url + 'jobs/' @@ -272,8 +278,9 @@ def test_sms_test_send(admin_user, app, connector): resp = app.get(url) link = resp.html.find('div', {'id': 'endpoints'}).find_all('a')[-1] assert 'Send a test message' in link.text - assert link['href'] == reverse('sms-test-send', kwargs={ - 'connector': connector.get_connector_slug(), 'slug': connector.slug}) + assert link['href'] == reverse( + 'sms-test-send', kwargs={'connector': connector.get_connector_slug(), 'slug': connector.slug} + ) resp = app.get(link['href']) resp.form['number'] = '+33688888888' @@ -283,7 +290,9 @@ def test_sms_test_send(admin_user, app, connector): send_function.return_value = {} resp = resp.form.submit() assert send_function.call_args[1] == { - 'text': 'hello', 'sender': '+33699999999', 'destinations': ['0033688888888'], + 'text': 'hello', + 'sender': '+33699999999', + 'destinations': ['0033688888888'], 'stop': False, } assert resp.status_code == 302 @@ -293,10 +302,11 @@ def test_sms_test_send(admin_user, app, connector): def test_ovh_new_api_credit(app, freezer, admin_user): login(app) connector = OVHSMSGateway.objects.create( - 
slug='ovh', account='sms-test42', + slug='ovh', + account='sms-test42', application_key='RHrTdU2oTsrVC0pu', application_secret='CLjtS69tTcPgCKxedeoZlgMSoQGSiXMa', - consumer_key='iF0zi0MJrbjNcI3hvuvwkhNk8skrigxz' + consumer_key='iF0zi0MJrbjNcI3hvuvwkhNk8skrigxz', ) manager_url = '/%s/%s/' % (connector.get_connector_slug(), connector.slug) @@ -327,7 +337,9 @@ def test_ovh_new_api_credit(app, freezer, admin_user): def test_ovh_alert_emails(app, freezer, mailoutbox): connector = OVHSMSGateway.objects.create( - slug='test-ovh', title='Test OVH', account='sms-test42', + slug='test-ovh', + title='Test OVH', + account='sms-test42', application_key='RHrTdU2oTsrVC0pu', application_secret='CLjtS69tTcPgCKxedeoZlgMSoQGSiXMa', consumer_key='iF0zi0MJrbjNcI3hvuvwkhNk8skrigxz', @@ -337,8 +349,9 @@ def test_ovh_alert_emails(app, freezer, mailoutbox): ) api = ApiUser.objects.create(username='apiuser') obj_type = ContentType.objects.get_for_model(connector) - AccessRight.objects.create(codename='can_send_messages', apiuser=api, resource_type=obj_type, - resource_pk=connector.pk) + AccessRight.objects.create( + codename='can_send_messages', apiuser=api, resource_type=obj_type, resource_pk=connector.pk + ) freezer.move_to('2019-01-01 00:00:00') resp = {'creditsLeft': 101} @@ -378,7 +391,9 @@ def test_ovh_alert_emails(app, freezer, mailoutbox): def test_ovh_token_request(admin_user, app): connector = OVHSMSGateway.objects.create( - slug='test-ovh', title='Test OVH', account='sms-test42', + slug='test-ovh', + title='Test OVH', + account='sms-test42', application_key='RHrTdU2oTsrVC0pu', application_secret='CLjtS69tTcPgCKxedeoZlgMSoQGSiXMa', ) @@ -399,7 +414,7 @@ def test_ovh_token_request(admin_user, app): request = mocked.handlers[0].call['requests'][0] body = json.loads(request.body.decode()) assert 'accessRules' in body - redirect_url = body['redirection'][len('http://testserver'):] + redirect_url = body['redirection'][len('http://testserver') :] resp = app.get(redirect_url).follow() 
assert 'Successfuly completed connector configuration' in resp.text diff --git a/tests/test_soap.py b/tests/test_soap.py index 377aef0a..e493329a 100644 --- a/tests/test_soap.py +++ b/tests/test_soap.py @@ -36,7 +36,6 @@ class BarPlugin(Plugin): class SOAPResource(object): - def __init__(self): self.requests = requests.Session() self.wsdl_url = WSDL @@ -56,21 +55,22 @@ def test_soap_client(): def test_disable_strict_mode(mocked_post): response = requests.Response() response.status_code = 200 - response._content = force_bytes(''' + response._content = force_bytes( + ''' 4.20 -''') +''' + ) mocked_post.return_value = response soap_resource = SOAPResource() client = SOAPClient(soap_resource) match = "Unexpected element %s, expected %s" % (repr(u'price'), repr(u'skipMe')) - with pytest.raises( - XMLParseError, match=match): + with pytest.raises(XMLParseError, match=match): client.service.GetLastTradePrice(tickerSymbol='banana') client = SOAPClient(soap_resource, settings=Settings(strict=False)) @@ -84,7 +84,8 @@ def test_disable_strict_mode(mocked_post): def test_remove_first_bytes_for_xml(mocked_post): response = requests.Response() response.status_code = 200 - response._content = force_bytes('''blabla \n + response._content = force_bytes( + '''blabla \n @@ -92,7 +93,8 @@ def test_remove_first_bytes_for_xml(mocked_post): 4.20 -\n bloublou''') +\n bloublou''' + ) mocked_post.return_value = response soap_resource = SOAPResource() @@ -101,10 +103,8 @@ def test_remove_first_bytes_for_xml(mocked_post): with pytest.raises(TransportError): client.service.GetLastTradePrice(tickerSymbol='banana') - client = SOAPClient(soap_resource, - transport_kwargs={'remove_first_bytes_for_xml': True}) + client = SOAPClient(soap_resource, transport_kwargs={'remove_first_bytes_for_xml': True}) result = client.service.GetLastTradePrice(tickerSymbol='banana') assert len(result) == 2 assert result['skipMe'] == 1.2 assert result['price'] == 4.2 - diff --git a/tests/test_solis.py 
b/tests/test_solis.py index f7a9c1e5..d0601709 100644 --- a/tests/test_solis.py +++ b/tests/test_solis.py @@ -19,7 +19,7 @@ APAINFOS = { 'consultationDeMesDroits': '{"demandeAsg":[{"demande":{"indexDemande":42,"type":"Allocation Personnalis\xc3\xa9e Autonomie","nature":"Domicile"},"droit":{"dateDebut":"2017-01-05","dateFin":"2019-01-31"},"complementDossier":{"dateDepot":"2016-11-15","dateArrivee":"2016-11-16","dateDossierComplet":"2016-11-17"},"gir":{"type":"Synth\xc3\xa8se","gir":3,"dateEvaluation":"2017-02-01"},"suivi":{"instructeur":{"civilite":"madame","nom":"ZEPEQPE","prenom":"EPOZOE","telephone":"0344974383","mail":"e.zepeqpe@xoppe.pp"},"dateDecision":"2017-01-05"},"bilan":{"pourcentageTM":2.1973443031311035},"prestationAccordeeAsg":[{"prestation":"Accueil de jour GIR 1-2","periode":{"dateEffet":"2017-01-05","dateFin":"2019-01-31"},"tiers":{"type":"Ind\xc3\xa9termin\xc3\xa9","identite":null,"tarif":null,"quantitatif":null},"quantite":0,"montant":{"accorde":0,"participation":0,"verse":0},"attributaire":{"type":"Tuteur","identite":"Association Tut\xc3\xa9laire de la Somme"}},{"prestation":"Articles d\'hygi\xc3\xa8ne forfait 45\xc2\x80","periode":{"dateEffet":"2017-01-05","dateFin":null},"tiers":{"type":"Ind\xc3\xa9termin\xc3\xa9","identite":null,"tarif":45,"quantitatif":"Mois"},"quantite":1,"montant":{"accorde":45,"participation":1.68,"verse":43.32},"attributaire":{"type":"B\xc3\xa9n\xc3\xa9ficiaire","identite":"PYPPENNE Pecile"}},{"prestation":"Petite structure","periode":{"dateEffet":"2017-01-05","dateFin":"2019-01-31"},"tiers":{"type":"Etablissement","identite":"MARPA LES NACRES","tarif":null,"quantitatif":null},"quantite":0,"montant":{"accorde":440.42,"participation":7.68,"verse":432.74},"attributaire":{"type":"Etablissement","identite":"MARPA LES NACRES"}},{"prestation":"Aide humaine prestataire","periode":{"dateEffet":"2017-01-05","dateFin":"2019-01-31"},"tiers":{"type":"Prestataire","identite":"COMMUNAUTE DE COMMUNES DU TERRITOIRE NORD PICARDIE 
BERNAVILLE","tarif":19,"quantitatif":"Heure(s)"},"quantite":45.5,"montant":{"accorde":864.5,"participation":18.93,"verse":845.57},"attributaire":{"type":"Prestataire","identite":"COMMUNAUTE DE COMMUNES DU TERRITOIRE NORD PICARDIE BERNAVILLE"}},{"prestation":"Articles d\'hygi\xc3\xa8ne forfait 90\xc2\x80","periode":{"dateEffet":"2017-01-05","dateFin":null},"tiers":{"type":"Ind\xc3\xa9termin\xc3\xa9","identite":null,"tarif":90,"quantitatif":"Mois"},"quantite":1,"montant":{"accorde":90,"participation":3.35,"verse":86.65},"attributaire":{"type":"B\xc3\xa9n\xc3\xa9ficiaire","identite":"PYPPENNE Pecile"}}]}]}', 'suiviDemandeEnInstruction': '{"demandeAsg":[]}', 'suiviDemandeHistorique': '{"demandeAsg":[{"demande":{"indexDemande":42,"type":"Allocation Personnalis\xc3\xa9e Autonomie","nature":"Domicile"},"droit":{"dateDebut":"2013-03-01","dateFin":"2013-06-19"},"complementDossier":{"dateArrivee":null,"dateDossierComplet":"2012-10-25"},"suivi":{"decision":"Accord","dateDecision":"2013-02-12"}},{"demande":{"indexDemande":43,"type":"Allocation Personnalis\xc3\xa9e Autonomie","nature":"Domicile"},"droit":{"dateDebut":"2013-06-20","dateFin":"2016-03-31"},"complementDossier":{"dateArrivee":null,"dateDossierComplet":"2012-10-25"},"suivi":{"decision":"Accord","dateDecision":"2013-06-25"}},{"demande":{"indexDemande":44,"type":"Allocation Personnalis\xc3\xa9e Autonomie","nature":"Domicile"},"droit":{"dateDebut":"2016-04-01","dateFin":"2017-01-04"},"complementDossier":{"dateArrivee":"2016-06-06","dateDossierComplet":"2016-06-06"},"suivi":{"decision":"Accord","dateDecision":"2016-06-14"}}]}', - 'propositionPlanAide': '{"demandeAsg":[{"demande":{"type":"Allocation Personnalis\\u00e9e Autonomie","indexDemande":42,"nature":"Domicile"},"droit":{"dateDebut":"2016-08-23","dateFin":"2018-08-31"},"planAide":{"commentaire":"","prestationsPlanAide":{"prestationPlanAide":[{"tiers":{"tarif":12.8,"identite":"CCAS DE MERS LES 
BAINS","quantitatif":"Heure(s)","type":"Prestataire"},"quantite":84,"montant":1075.2,"prestation":"Aide humaine mandataire"},{"tiers":{"tarif":90,"identite":null,"quantitatif":"Mois","type":"Ind\\u00e9termin\\u00e9"},"quantite":1,"montant":90,"prestation":"Articles d\'hygi\\u00e8ne forfait 90"}]},"dateReponse":null,"datePropositionPlan":null,"avis":""},"complementDossier":{"dateDepot":"2016-06-22"},"suivi":{"dateDecision":"2016-08-23"}}]}', + 'propositionPlanAide': '{"demandeAsg":[{"demande":{"type":"Allocation Personnalis\\u00e9e Autonomie","indexDemande":42,"nature":"Domicile"},"droit":{"dateDebut":"2016-08-23","dateFin":"2018-08-31"},"planAide":{"commentaire":"","prestationsPlanAide":{"prestationPlanAide":[{"tiers":{"tarif":12.8,"identite":"CCAS DE MERS LES BAINS","quantitatif":"Heure(s)","type":"Prestataire"},"quantite":84,"montant":1075.2,"prestation":"Aide humaine mandataire"},{"tiers":{"tarif":90,"identite":null,"quantitatif":"Mois","type":"Ind\\u00e9termin\\u00e9"},"quantite":1,"montant":90,"prestation":"Articles d\'hygi\\u00e8ne forfait 90"}]},"dateReponse":null,"datePropositionPlan":null,"avis":""},"complementDossier":{"dateDepot":"2016-06-22"},"suivi":{"dateDecision":"2016-08-23"}}]}', } APAREQUEST = '{"demandeAsg":{"visite":{"date":"2016-07-07","heure":"1330"},"demande":{"type":"Allocation Personnalis\\u00e9e Autonomie","indexDemande":42,"nature":"Domicile"},"droit":{"dateDebut":"2016-08-23","dateFin":"2018-08-31"},"complementDossier":{"dateArrivee":"2016-06-22","dateDossierComplet":"2016-06-22"},"suivi":{"dateDecision":"2016-08-23","decision":"Accord"}}}' DEPARTEMENTS = '{"departements":[{"code":"1","libelle":"Ain","pays":{"code":"79","libelle":"France"}},{"code":"2","libelle":"Aisne","pays":{"code":"79","libelle":"France"}},{"code":"3","libelle":"Allier","pays":{"code":"79","libelle":"France"}},{"code":"4","libelle":"Alpes de Haute Provence","pays":{"code":"79","libelle":"France"}},{"code":"5","libelle":"Hautes 
Alpes","pays":{"code":"79","libelle":"France"}},{"code":"6","libelle":"Alpes Maritimes","pays":{"code":"79","libelle":"France"}},{"code":"7","libelle":"Ardèche","pays":{"code":"79","libelle":"France"}},{"code":"8","libelle":"Ardennes","pays":{"code":"79","libelle":"France"}}]}' @@ -77,13 +77,14 @@ RSA_ACTIONS = '''[ ]''' - @pytest.fixture def solis(db): - return Solis.objects.create(slug='test', - service_url='https://solis.example.net/solisapi/', - basic_auth_username='usertest', - basic_auth_password='userpass') + return Solis.objects.create( + slug='test', + service_url='https://solis.example.net/solisapi/', + basic_auth_username='usertest', + basic_auth_password='userpass', + ) def test_solis_restricted_access(app, solis): @@ -114,22 +115,27 @@ def test_solis_restricted_access(app, solis): @pytest.fixture def ping_response(): response_request = mock.Mock(headers={'Accept': '*/*'}, body=None) - return utils.FakedResponse(headers={'Content-Type': 'text/plain'}, status_code=200, - request=response_request) + return utils.FakedResponse( + headers={'Content-Type': 'text/plain'}, status_code=200, request=response_request + ) + def test_solis_ping(app, solis, ping_response): # full opened access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(solis) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=solis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=solis.pk + ) endpoint = utils.generic_endpoint_url('solis', 'ping', slug=solis.slug) with mock.patch('passerelle.utils.RequestSession.request') as requests_get: - for bad_content in ('error', - '{"foo": "bar"}', - '["not", "a", "dict"]', - '{"response": "Solis API est en panne"}'): + for bad_content in ( + 'error', + '{"foo": "bar"}', + '["not", "a", "dict"]', + '{"response": "Solis API est en panne"}', + ): ping_response.content = bad_content 
requests_get.return_value = ping_response resp = app.get(endpoint, status=200) @@ -176,16 +182,19 @@ def test_solis_ping(app, solis, ping_response): solis.http_proxy = 'http://proxy:3128/' solis.save() resp = app.get(endpoint, status=200) - assert requests_get.call_args[1]['proxies'] == {'http': 'http://proxy:3128/', - 'https': 'http://proxy:3128/'} + assert requests_get.call_args[1]['proxies'] == { + 'http': 'http://proxy:3128/', + 'https': 'http://proxy:3128/', + } def test_solis_apa_link_infos_unlink(app, solis): # full opened access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(solis) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=solis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=solis.pk + ) # link with mock.patch('passerelle.utils.Request.post') as requests_post: # get solis token @@ -209,9 +218,9 @@ def test_solis_apa_link_infos_unlink(app, solis): assert 'missing user_id/code credentials' in resp.json['err_desc'] requests_post.return_value = utils.FakedResponse(content=APATOKEN_403, status_code=403) - resp = app.post_json(endpoint, - params={'user_id': 'x', 'code': 'x', 'name_id': NAMEID}, - status=200) + resp = app.post_json( + endpoint, params={'user_id': 'x', 'code': 'x', 'name_id': NAMEID}, status=200 + ) assert requests_post.call_count == 1 assert requests_get.call_count == 0 assert resp.json['err'] == 1 @@ -220,11 +229,12 @@ def test_solis_apa_link_infos_unlink(app, solis): assert SolisAPALink.objects.count() == 0 requests_post.return_value = utils.FakedResponse(content=APATOKEN, status_code=200) - requests_get.return_value = utils.FakedResponse(content=APAINFOS['exportDonneesIndividu'], - status_code=200) - resp = app.post_json(endpoint, - params={'name_id': NAMEID, 'user_id': '42', 'code': 'foo'}, - status=200) + requests_get.return_value = utils.FakedResponse( + 
content=APAINFOS['exportDonneesIndividu'], status_code=200 + ) + resp = app.post_json( + endpoint, params={'name_id': NAMEID, 'user_id': '42', 'code': 'foo'}, status=200 + ) assert requests_post.call_count == 2 assert requests_get.call_count == 1 assert resp.json['err'] == 0 @@ -238,9 +248,9 @@ def test_solis_apa_link_infos_unlink(app, solis): assert SolisAPALink.objects.first().text == 'Mme Pecile PYPPENNE (NPYNEZ)' # change code - resp = app.post_json(endpoint, - params={'name_id': NAMEID, 'user_id': '42', 'code': 'bar'}, - status=200) + resp = app.post_json( + endpoint, params={'name_id': NAMEID, 'user_id': '42', 'code': 'bar'}, status=200 + ) assert requests_post.call_count == 3 assert requests_get.call_count == 2 assert resp.json['err'] == 0 @@ -254,9 +264,9 @@ def test_solis_apa_link_infos_unlink(app, solis): assert SolisAPALink.objects.first().text == 'Mme Pecile PYPPENNE (NPYNEZ)' # second link - resp = app.post_json(endpoint, - params={'name_id': NAMEID, 'user_id': '53', 'code': 'bar'}, - status=200) + resp = app.post_json( + endpoint, params={'name_id': NAMEID, 'user_id': '53', 'code': 'bar'}, status=200 + ) assert requests_post.call_count == 4 assert requests_get.call_count == 3 assert resp.json['err'] == 0 @@ -266,8 +276,10 @@ def test_solis_apa_link_infos_unlink(app, solis): assert SolisAPALink.objects.count() == 2 # verify recorded names after link - assert [x['text'] for x in SolisAPALink.objects.values('text')] == \ - ['Mme Pecile PYPPENNE (NPYNEZ)', 'Mme Pecile PYPPENNE (NPYNEZ)'] + assert [x['text'] for x in SolisAPALink.objects.values('text')] == [ + 'Mme Pecile PYPPENNE (NPYNEZ)', + 'Mme Pecile PYPPENNE (NPYNEZ)', + ] endpoint = utils.generic_endpoint_url('solis', 'apa-links', slug=solis.slug) resp = app.get(endpoint, status=400) # missing name_id assert resp.json['err'] == 1 @@ -275,8 +287,7 @@ def test_solis_apa_link_infos_unlink(app, solis): resp = app.get(endpoint, status=200) assert resp.json['err'] == 0 assert len(resp.json['data']) == 2 - 
assert resp.json['data'][0]['text'] == resp.json['data'][1]['text'] == \ - 'Mme Pecile PYPPENNE (NPYNEZ)' + assert resp.json['data'][0]['text'] == resp.json['data'][1]['text'] == 'Mme Pecile PYPPENNE (NPYNEZ)' # get base informations from a linked user (exportDonneesIndividu) changed_name = APAINFOS['exportDonneesIndividu'].replace('PYPPENNE', 'PEPONE') @@ -290,8 +301,7 @@ def test_solis_apa_link_infos_unlink(app, solis): assert resp.json['err'] == 0 assert resp.json['data']['individu']['nomUsuel'] == 'PEPONE' # user "text" updated in link: - assert SolisAPALink.objects.get(name_id=NAMEID, user_id='42').text == \ - 'Mme Pecile PEPONE (NPYNEZ)' + assert SolisAPALink.objects.get(name_id=NAMEID, user_id='42').text == 'Mme Pecile PEPONE (NPYNEZ)' # get all kind of informations for apa_endpoint in APAINFOS: @@ -304,28 +314,27 @@ def test_solis_apa_link_infos_unlink(app, solis): assert resp.json['err'] == 1 endpoint = endpoint_base + '?name_id=%s&user_id=53&information=%s' % (NAMEID, apa_endpoint) - requests_get.return_value = utils.FakedResponse(content=APAINFOS[apa_endpoint], - status_code=200) + requests_get.return_value = utils.FakedResponse( + content=APAINFOS[apa_endpoint], status_code=200 + ) resp = app.get(endpoint, status=200) assert requests_post.call_count == 1 # get a token - assert requests_get.call_count == 1 # get informations + assert requests_get.call_count == 1 # get informations assert ('/asg/apa/%s' % apa_endpoint) in requests_get.call_args[0][0] assert resp.json['err'] == 0 assert resp.json['data'] # solis api crash - requests_get.return_value = utils.FakedResponse(content='boum', - status_code=500) + requests_get.return_value = utils.FakedResponse(content='boum', status_code=500) resp = app.get(endpoint, status=200) assert requests_post.call_count == 2 # get a token - assert requests_get.call_count == 2 # get informations + assert requests_get.call_count == 2 # get informations assert ('/asg/apa/%s' % apa_endpoint) in requests_get.call_args[0][0] 
assert resp.json['err'] == 1 assert resp.json['err_desc'].startswith('error status:500') assert resp.json['data'] == {'json_content': None, 'status_code': 500} - requests_get.return_value = utils.FakedResponse(content='{"error":"foobar"}', - status_code=500) + requests_get.return_value = utils.FakedResponse(content='{"error":"foobar"}', status_code=500) resp = app.get(endpoint, status=200) assert resp.json['err'] == 1 assert resp.json['err_desc'].startswith('error status:500') @@ -379,11 +388,20 @@ def test_solis_apa_link_infos_unlink(app, solis): assert resp.json['err'] == 1 # get indexDemande 42 in lists - for information in ('consultationDeMesDroits', 'suiviDemandeHistorique', 'propositionPlanAide',): + for information in ( + 'consultationDeMesDroits', + 'suiviDemandeHistorique', + 'propositionPlanAide', + ): requests_post.reset_mock() requests_get.reset_mock() - endpoint = endpoint_base + '?name_id=%s&user_id=53&information=%s&index=42' % (NAMEID, information) - requests_get.return_value = utils.FakedResponse(content=APAINFOS[information], status_code=200) + endpoint = endpoint_base + '?name_id=%s&user_id=53&information=%s&index=42' % ( + NAMEID, + information, + ) + requests_get.return_value = utils.FakedResponse( + content=APAINFOS[information], status_code=200 + ) resp = app.get(endpoint, status=200) assert ('/asg/apa/%s/' % information) in requests_get.call_args[0][0] requests_post.assert_called_once() @@ -391,7 +409,10 @@ def test_solis_apa_link_infos_unlink(app, solis): assert resp.json['err'] == 0 assert resp.json['data']['demandeAsg']['demande']['indexDemande'] == 42 - endpoint = endpoint_base + '?name_id=%s&user_id=53&information=%s&index=57' % (NAMEID, information) + endpoint = endpoint_base + '?name_id=%s&user_id=53&information=%s&index=57' % ( + NAMEID, + information, + ) resp = app.get(endpoint, status=200) assert ('/asg/apa/%s/' % information) in requests_get.call_args[0][0] assert requests_post.call_count == 2 @@ -415,8 +436,10 @@ def 
test_solis_apa_link_infos_unlink(app, solis): assert set([x['id'] for x in resp.json['data']]) == set(['42', '53']) assert resp.json['data'][0]['text'] == 'Mme Pecile PEPPYNE (NPYNEZ)' # user "text" updated in links: - assert [x['text'] for x in SolisAPALink.objects.values('text')] == \ - ['Mme Pecile PEPPYNE (NPYNEZ)', 'Mme Pecile PEPPYNE (NPYNEZ)'] + assert [x['text'] for x in SolisAPALink.objects.values('text')] == [ + 'Mme Pecile PEPPYNE (NPYNEZ)', + 'Mme Pecile PEPPYNE (NPYNEZ)', + ] # unlink endpoint = utils.generic_endpoint_url('solis', 'apa-unlink', slug=solis.slug) @@ -456,8 +479,9 @@ def test_solis_referentiels(app, solis): # full opened access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(solis) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=solis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=solis.pk + ) with mock.patch('passerelle.utils.Request.get') as requests_get: requests_get.return_value = utils.FakedResponse(content=DEPARTEMENTS, status_code=200) @@ -510,8 +534,9 @@ def test_solis_referentiels(app, solis): assert 'codePays=79' in called_url assert 'foo=bar' not in called_url - requests_get.return_value = utils.FakedResponse(content='{"nada":0}', status_code=404, - reason='Not found') + requests_get.return_value = utils.FakedResponse( + content='{"nada":0}', status_code=404, reason='Not found' + ) resp = app.get(url + '/foo/bar/', status=200) assert requests_get.call_args[0][0].endswith('/solisapi/referentiels/foo/bar') assert resp.json['err'] == 1 @@ -540,8 +565,9 @@ def test_solis_referential_item(app, solis): # full opened access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(solis) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=solis.pk) + 
AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=solis.pk + ) with mock.patch('passerelle.utils.Request.get') as requests_get: requests_get.return_value = utils.FakedResponse(content=CIVI_INDIVIDU, status_code=200) @@ -552,8 +578,9 @@ def test_solis_referential_item(app, solis): assert resp.json['err'] == 0 assert resp.json['data']['etatCivil']['nom'] == 'NOM' - requests_get.return_value = utils.FakedResponse(content='{"nada":0}', status_code=404, - reason='Not found') + requests_get.return_value = utils.FakedResponse( + content='{"nada":0}', status_code=404, reason='Not found' + ) url = utils.generic_endpoint_url('solis', 'referential-item', slug=solis.slug) resp = app.get(url + '/civi/individu/424242/', status=200) assert requests_get.call_args[0][0].endswith('/solisapi/referentiels/civi/individu/424242/') @@ -570,18 +597,24 @@ def test_solis_referential_item(app, solis): def test_unflat_dict(): - assert unflat({'foo': 'bar', 'two_foo': 'one', 'two_bar': 'two'}) == {'foo': 'bar', 'two': {'foo': 'one', 'bar': 'two'}} + assert unflat({'foo': 'bar', 'two_foo': 'one', 'two_bar': 'two'}) == { + 'foo': 'bar', + 'two': {'foo': 'one', 'bar': 'two'}, + } def test_solis_apa_integration(app, solis): api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(solis) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=solis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=solis.pk + ) with mock.patch('passerelle.utils.Request.post') as requests_post: + def integration_ok(*args, **kwargs): return utils.FakedResponse(content='', status_code=204) + requests_post.return_value = utils.FakedResponse(content='', status_code=204) # requests_post.side_effect = [utils.FakedResponse(content='', status_code=204)] url = utils.generic_endpoint_url('solis', 'apa-integration', 
slug=solis.slug) @@ -625,7 +658,8 @@ def test_solis_apa_integration(app, solis): requests_post.reset_mock() requests_post.side_effect = [ utils.FakedResponse(content='{"id": "foo", "nbFichiersAcceptes": 3}', status_code=200), - utils.FakedResponse(content='', status_code=204)] + utils.FakedResponse(content='', status_code=204), + ] demande['file:etat_civil_001.pdf'] = { 'content': 'JVBERmZha2U=', 'content_type': 'application/pdf', @@ -634,14 +668,14 @@ def test_solis_apa_integration(app, solis): demande['file:etat_civil_002.pdf'] = { # jpeg, will be converted to PDF 'content': '/9j/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCw' - 'kJDRENDg8QEBEQCgwSExIQEw8QEBD/yQALCAABAAEBAREA/8wABgAQEAX/2gAIAQEAAD8A0s8g/9k=', + 'kJDRENDg8QEBEQCgwSExIQEw8QEBD/yQALCAABAAEBAREA/8wABgAQEAX/2gAIAQEAAD8A0s8g/9k=', 'content_type': 'image/jpeg', 'filename': 'image.jpg', } demande['file:etat_civil_003.pdf'] = { # transparent png (RGBA), will be converted to RGB and then PDF 'content': 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAACklEQVR4nGMAAQAABQ' - 'ABDQottAAAAABJRU5ErkJggg==', + 'ABDQottAAAAABJRU5ErkJggg==', 'content_type': 'image/png', 'filename': 'image.png', } @@ -671,9 +705,9 @@ def test_solis_apa_integration(app, solis): assert resp.json['err'] == 0 assert resp.json['data'] is None assert resp.json['files_sent'] == {'id': 'foo', 'nbFichiersAcceptes': 3} - assert set(resp.json['files_failed_pdf_conversion']) == set(['etat_civil_004.pdf', - 'etat_civil_005.pdf', - 'etat_civil_006.pdf']) + assert set(resp.json['files_failed_pdf_conversion']) == set( + ['etat_civil_004.pdf', 'etat_civil_005.pdf', 'etat_civil_006.pdf'] + ) # invalid inputs requests_post.reset_mock() @@ -690,7 +724,8 @@ def test_solis_apa_integration(app, solis): requests_post.reset_mock() requests_post.side_effect = [ utils.FakedResponse(content='{"id": "foo", "nbFichiersAcceptes": 0}', status_code=200), - utils.FakedResponse(content='', status_code=204)] + utils.FakedResponse(content='', 
status_code=204), + ] resp = app.post_json(url, params=demande, status=200) requests_post.assert_called_once() # don't try to post request assert resp.json['err'] == 1 @@ -701,7 +736,8 @@ def test_solis_apa_integration(app, solis): requests_post.reset_mock() requests_post.side_effect = [ utils.FakedResponse(content='{"error": 1}', status_code=500), - utils.FakedResponse(content='', status_code=204)] + utils.FakedResponse(content='', status_code=204), + ] resp = app.post_json(url, params=demande, status=200) requests_post.assert_called_once() # don't try to post request assert resp.json['err'] == 1 @@ -712,8 +748,9 @@ def test_solis_rsa_link_infos_unlink(app, solis): # full opened access api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(solis) - AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, - resource_pk=solis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=solis.pk + ) # link with mock.patch('passerelle.utils.Request.post') as requests_post: # get solis token @@ -725,8 +762,14 @@ def test_solis_rsa_link_infos_unlink(app, solis): assert resp.json['err'] == 1 assert 'payload is not a JSON dict' in resp.json['err_desc'] - for params in ({}, {'user_id': 'x'}, {'code': 'x'}, {'foo': 'bar'}, - {'name_id': ''}, {'user_id': '', 'code': ''}): + for params in ( + {}, + {'user_id': 'x'}, + {'code': 'x'}, + {'foo': 'bar'}, + {'name_id': ''}, + {'user_id': '', 'code': ''}, + ): resp = app.post_json(endpoint, params=params, status=200) assert requests_post.call_count == 0 assert resp.json['err'] == 1 @@ -738,9 +781,9 @@ def test_solis_rsa_link_infos_unlink(app, solis): assert 'missing user_id/code credentials' in resp.json['err_desc'] requests_post.return_value = utils.FakedResponse(content=RSATOKEN_403, status_code=403) - resp = app.post_json(endpoint, - params={'user_id': 'x', 'code': 'x', 'name_id': NAMEID}, - 
status=200) + resp = app.post_json( + endpoint, params={'user_id': 'x', 'code': 'x', 'name_id': NAMEID}, status=200 + ) assert requests_post.call_count == 1 assert requests_get.call_count == 0 assert resp.json['err'] == 1 @@ -749,11 +792,10 @@ def test_solis_rsa_link_infos_unlink(app, solis): assert SolisRSALink.objects.count() == 0 requests_post.return_value = utils.FakedResponse(content=RSATOKEN, status_code=200) - requests_get.return_value = utils.FakedResponse(content=CIVI_INDIVIDU, - status_code=200) - resp = app.post_json(endpoint, - params={'name_id': NAMEID, 'user_id': '4273', 'code': 'foo'}, - status=200) + requests_get.return_value = utils.FakedResponse(content=CIVI_INDIVIDU, status_code=200) + resp = app.post_json( + endpoint, params={'name_id': NAMEID, 'user_id': '4273', 'code': 'foo'}, status=200 + ) assert requests_post.call_count == 2 assert requests_get.call_count == 1 assert resp.json['err'] == 0 @@ -768,9 +810,11 @@ def test_solis_rsa_link_infos_unlink(app, solis): assert SolisRSALink.objects.first().dob is None # change code, add dob - resp = app.post_json(endpoint, - params={'name_id': NAMEID, 'user_id': '4273', 'code': 'bar', 'dob': '01/01/1950'}, - status=200) + resp = app.post_json( + endpoint, + params={'name_id': NAMEID, 'user_id': '4273', 'code': 'bar', 'dob': '01/01/1950'}, + status=200, + ) assert requests_post.call_count == 3 assert requests_get.call_count == 2 assert resp.json['err'] == 0 @@ -787,14 +831,18 @@ def test_solis_rsa_link_infos_unlink(app, solis): # second link requests_post.reset_mock() requests_get.reset_mock() - resp = app.post_json(endpoint, - params={'name_id': NAMEID, 'user_id': '4242', 'code': 'bar', 'dob': '10/10/1960'}, - status=200) + resp = app.post_json( + endpoint, + params={'name_id': NAMEID, 'user_id': '4242', 'code': 'bar', 'dob': '10/10/1960'}, + status=200, + ) assert requests_post.call_count == 1 assert requests_get.call_count == 1 - assert requests_post.call_args[1]['json'] == {'codeConfidentiel': 'bar', 
- 'dateNaissance': '10/10/1960', - 'indexIndividu': '4242'} + assert requests_post.call_args[1]['json'] == { + 'codeConfidentiel': 'bar', + 'dateNaissance': '10/10/1960', + 'indexIndividu': '4242', + } assert resp.json['err'] == 0 assert resp.json['data']['user_id'] == '4242' assert resp.json['data']['created'] @@ -802,8 +850,7 @@ def test_solis_rsa_link_infos_unlink(app, solis): assert SolisRSALink.objects.count() == 2 # verify recorded names after link - assert [x['text'] for x in SolisRSALink.objects.values('text')] == \ - ['MME Prenom NOM', 'MME Prenom NOM'] + assert [x['text'] for x in SolisRSALink.objects.values('text')] == ['MME Prenom NOM', 'MME Prenom NOM'] endpoint = utils.generic_endpoint_url('solis', 'rsa-links', slug=solis.slug) resp = app.get(endpoint, status=400) # missing name_id @@ -826,8 +873,7 @@ def test_solis_rsa_link_infos_unlink(app, solis): assert resp.json['err'] == 0 assert resp.json['data']['etatCivil']['prenom'] == 'Postnom' # user "text" updated in link: - assert SolisRSALink.objects.get(name_id=NAMEID, user_id='4242').text == \ - 'MME Postnom NOM' + assert SolisRSALink.objects.get(name_id=NAMEID, user_id='4242').text == 'MME Postnom NOM' # get referential for a linked user with mock.patch('passerelle.utils.Request.get') as requests_get: @@ -839,28 +885,25 @@ def test_solis_rsa_link_infos_unlink(app, solis): assert resp.json['err'] == 1 endpoint = endpoint_base + '?name_id=%s&user_id=4242&information=allocataires' % NAMEID - requests_get.return_value = utils.FakedResponse(content=RSAALLOCATAIRES, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=RSAALLOCATAIRES, status_code=200) resp = app.get(endpoint, status=200) assert requests_post.call_count == 1 # get a token - assert requests_get.call_count == 1 # get informations + assert requests_get.call_count == 1 # get informations assert '/referentiels/grsa/allocataires/search/' in requests_get.call_args[0][0] assert resp.json['err'] == 0 assert resp.json['data'] 
# solis api crash - requests_get.return_value = utils.FakedResponse(content='boum', - status_code=500) + requests_get.return_value = utils.FakedResponse(content='boum', status_code=500) resp = app.get(endpoint, status=200) assert requests_post.call_count == 2 # get a token - assert requests_get.call_count == 2 # get informations + assert requests_get.call_count == 2 # get informations assert '/referentiels/grsa/allocataires/search/' in requests_get.call_args[0][0] assert resp.json['err'] == 1 assert resp.json['err_desc'].startswith('error status:500') assert resp.json['data'] == {'json_content': None, 'status_code': 500} - requests_get.return_value = utils.FakedResponse(content='{"error":"foobar"}', - status_code=500) + requests_get.return_value = utils.FakedResponse(content='{"error":"foobar"}', status_code=500) resp = app.get(endpoint, status=200) assert resp.json['err'] == 1 assert resp.json['err_desc'].startswith('error status:500') @@ -880,11 +923,10 @@ def test_solis_rsa_link_infos_unlink(app, solis): requests_post.return_value = utils.FakedResponse(content=RSATOKEN, status_code=200) endpoint_base = utils.generic_endpoint_url('solis', 'rsa-user-info', slug=solis.slug) endpoint = endpoint_base + '?name_id=%s&user_id=4242&information=actions' % NAMEID - requests_get.return_value = utils.FakedResponse(content=RSA_ACTIONS, - status_code=200) + requests_get.return_value = utils.FakedResponse(content=RSA_ACTIONS, status_code=200) resp = app.get(endpoint, status=200) assert requests_post.call_count == 1 # get a token - assert requests_get.call_count == 1 # get actions + assert requests_get.call_count == 1 # get actions assert '/referentiels/grsa/actions/search/' in requests_get.call_args[0][0] assert resp.json['err'] == 0 assert len(resp.json['data']) == 4 # all actions @@ -911,14 +953,14 @@ def test_solis_rsa_link_infos_unlink(app, solis): requests_get.reset_mock() requests_get.side_effect = [ - utils.FakedResponse(status_code=200, content=RSA_ACTIONS), # base info 
+ utils.FakedResponse(status_code=200, content=RSA_ACTIONS), # base info utils.FakedResponse(status_code=200, content='{"nom":"Structure1"}'), # link 1 utils.FakedResponse(status_code=200, content='{"nom":"Structure2"}'), # link 2 ] filtered = endpoint + '&links' resp = app.get(filtered, status=200) assert len(resp.json['data']) == 4 - assert requests_get.call_count == 3 # actions + two _links + assert requests_get.call_count == 3 # actions + two _links assert resp.json['data'][0]['rsa_links']['structure']['content'] == {'nom': 'Structure1'} assert resp.json['data'][2]['rsa_links']['structure']['content'] == {'nom': 'Structure2'} assert 'rsa_links' not in resp.json['data'][1] @@ -932,12 +974,13 @@ def test_solis_rsa_link_infos_unlink(app, solis): requests_get.side_effect = [ utils.FakedResponse(status_code=200, content=RSAALLOCATAIRES), # base info - utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU), # link 1 - utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU)] # link 2 + utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU), # link 1 + utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU), + ] # link 2 endpoint = endpoint_base + '?name_id=%s&user_id=4242&information=allocataires&links' % NAMEID resp = app.get(endpoint, status=200) assert requests_post.call_count == 1 # get a token - assert requests_get.call_count == 3 # get informations + two links + assert requests_get.call_count == 3 # get informations + two links assert resp.json['err'] == 0 assert resp.json['data']['rsa_links']['etatCivil']['content']['index'] == 4273 assert resp.json['data']['rsa_links']['conjoint']['content']['index'] == 4273 @@ -946,14 +989,15 @@ def test_solis_rsa_link_infos_unlink(app, solis): requests_post.reset_mock() requests_get.reset_mock() requests_get.side_effect = [ - utils.FakedResponse(status_code=200, content=RSA_3LINKS), # base info - utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU), # link 1 - utils.FakedResponse(status_code=200, 
content=CIVI_INDIVIDU), # link 2.1 - utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU)] # link 2.2 + utils.FakedResponse(status_code=200, content=RSA_3LINKS), # base info + utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU), # link 1 + utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU), # link 2.1 + utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU), + ] # link 2.2 endpoint = endpoint_base + '?name_id=%s&user_id=4242&information=evaluations&links' % NAMEID resp = app.get(endpoint, status=200) assert requests_post.call_count == 1 # get a token - assert requests_get.call_count == 4 # get informations + 2+1 links + assert requests_get.call_count == 4 # get informations + 2+1 links assert resp.json['err'] == 0 assert resp.json['data']['rsa_links']['etatCivil']['content']['index'] == 4273 assert resp.json['data']['rsa_links']['refOrientStructAcc'][0]['content']['index'] == 4273 @@ -964,11 +1008,15 @@ def test_solis_rsa_link_infos_unlink(app, solis): requests_get.reset_mock() requests_get.side_effect = [ utils.FakedResponse(status_code=200, content=RSAALLOCATAIRES), # base info - utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU)] # link 1 - endpoint = endpoint_base + '?name_id=%s&user_id=4242&information=allocataires&links=conjoint, ,xx,' % NAMEID + utils.FakedResponse(status_code=200, content=CIVI_INDIVIDU), + ] # link 1 + endpoint = ( + endpoint_base + + '?name_id=%s&user_id=4242&information=allocataires&links=conjoint, ,xx,' % NAMEID + ) resp = app.get(endpoint, status=200) assert requests_post.call_count == 1 # get a token - assert requests_get.call_count == 2 # get informations + conjoint + assert requests_get.call_count == 2 # get informations + conjoint assert resp.json['err'] == 0 assert resp.json['data']['rsa_links']['conjoint']['content']['index'] == 4273 assert 'content' not in resp.json['data']['rsa_links']['etatCivil'] @@ -977,27 +1025,30 @@ def test_solis_rsa_link_infos_unlink(app, solis): 
requests_post.reset_mock() requests_get.reset_mock() requests_get.side_effect = [ - utils.FakedResponse(status_code=200, content=RSAALLOCATAIRES), # base info + utils.FakedResponse(status_code=200, content=RSAALLOCATAIRES), # base info utils.FakedResponse(status_code=404, content='{"foo": "bar"}'), # link 1 - utils.FakedResponse(status_code=500, content='boom')] # link 2 + utils.FakedResponse(status_code=500, content='boom'), + ] # link 2 endpoint = endpoint_base + '?name_id=%s&user_id=4242&information=allocataires&links' % NAMEID resp = app.get(endpoint, status=200) assert requests_post.call_count == 1 # get a token - assert requests_get.call_count == 3 # get informations + two links + assert requests_get.call_count == 3 # get informations + two links assert resp.json['err'] == 0 assert resp.json['data']['rsa_links']['etatCivil']['content']['err'] == 1 assert resp.json['data']['rsa_links']['conjoint']['content']['err'] == 1 # bad links, do nothing - for content in (RSAALLOCATAIRES.replace('solis.example.net', 'solis.example.org'), - RSAALLOCATAIRES.replace('href', 'xxxx')): + for content in ( + RSAALLOCATAIRES.replace('solis.example.net', 'solis.example.org'), + RSAALLOCATAIRES.replace('href', 'xxxx'), + ): requests_post.reset_mock() requests_get.reset_mock() requests_get.side_effect = [utils.FakedResponse(status_code=200, content=content)] endpoint = endpoint_base + '?name_id=%s&user_id=4242&information=allocataires&links' % NAMEID resp = app.get(endpoint, status=200) assert requests_post.call_count == 1 # get a token - assert requests_get.call_count == 1 # get informations only, not links + assert requests_get.call_count == 1 # get informations only, not links assert resp.json['err'] == 0 assert 'content' not in resp.json['data']['rsa_links']['conjoint'] assert 'content' not in resp.json['data']['rsa_links']['etatCivil'] diff --git a/tests/test_solis_afi_mss.py b/tests/test_solis_afi_mss.py index be6437f1..12e4f255 100644 --- a/tests/test_solis_afi_mss.py +++ 
b/tests/test_solis_afi_mss.py @@ -28,18 +28,19 @@ from passerelle.utils.jsonresponse import APIError @pytest.fixture def connector(db): - return utils.setup_access_rights(SolisAfiMss.objects.create( - slug='test', - base_url='https://dummy-server.org' - )) + return utils.setup_access_rights( + SolisAfiMss.objects.create(slug='test', base_url='https://dummy-server.org') + ) TEST_BASE_DIR = os.path.join(os.path.dirname(__file__), 'data', 'solis_afi_mss') + def json_get_data(filename): with open(os.path.join(TEST_BASE_DIR, "%s.json" % filename)) as fd: return json.dumps(json.load(fd)) + def response(status_code, content): return utils.FakedResponse(content=content, status_code=status_code) @@ -74,11 +75,14 @@ def get_endpoint(name): @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('status_code, json_content, a_dict', [ - (200, 'not json', None), - (500, '{"message": "help"}', {'message': 'help'}), - (500, 'not json', None), -]) +@pytest.mark.parametrize( + 'status_code, json_content, a_dict', + [ + (200, 'not json', None), + (500, '{"message": "help"}', {'message': 'help'}), + (500, 'not json', None), + ], +) def test_request_error(mocked_get, app, connector, status_code, json_content, a_dict): mocked_get.side_effect = [response(status_code, json_content)] with pytest.raises(APIError) as exc: @@ -96,11 +100,11 @@ def test_request_error(mocked_get, app, connector, status_code, json_content, a_ def test_check_status(mocked_get, app, connector): mocked_get.side_effect = [IS_ALIVE] connector.check_status() - assert mocked_get.mock_calls == [mock.call( - 'https://dummy-server.org/main/isAlive/', - headers={'Accept': 'application/json'}, - params=None - )] + assert mocked_get.mock_calls == [ + mock.call( + 'https://dummy-server.org/main/isAlive/', headers={'Accept': 'application/json'}, params=None + ) + ] @mock.patch('passerelle.utils.Request.get') @@ -111,39 +115,61 @@ def test_check_status_error(mocked_get, app, connector): 
@mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, adults, children', [ - (RECHERCHE_PAR_EMAIL_1, [ - (389227, 'Jacques ROUSSEAU'), - (434729, 'Rina DI MARINO'), - ], [ - (389229, 'Lola ROUSSEAU'), - (389230, 'Nicolas ROUSSEAU'), - (389231, 'Mélina ROUSSEAU'), - ]), - (RECHERCHE_PAR_EMAIL_2, [ - (388412, 'Louise PIED'), - ], [ - (388413, 'KEVIN PIED'), - ]), - (RECHERCHE_PAR_EMAIL_3, [ - (388420, 'Marie-Noëlle BASDEVANT'), - (434728, 'PIETRO BARTOLOMEO'), - ], []), - (RECHERCHE_PAR_EMAIL_4, [ - (388405, 'Jean-Christophe HUREL'), - (434727, 'CAROLE HUREL'), - ], [ - (388407, 'Camille HUREL'), - (388408, 'Valentin HUREL'), - ]), -]) +@pytest.mark.parametrize( + 'response1, adults, children', + [ + ( + RECHERCHE_PAR_EMAIL_1, + [ + (389227, 'Jacques ROUSSEAU'), + (434729, 'Rina DI MARINO'), + ], + [ + (389229, 'Lola ROUSSEAU'), + (389230, 'Nicolas ROUSSEAU'), + (389231, 'Mélina ROUSSEAU'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_2, + [ + (388412, 'Louise PIED'), + ], + [ + (388413, 'KEVIN PIED'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_3, + [ + (388420, 'Marie-Noëlle BASDEVANT'), + (434728, 'PIETRO BARTOLOMEO'), + ], + [], + ), + ( + RECHERCHE_PAR_EMAIL_4, + [ + (388405, 'Jean-Christophe HUREL'), + (434727, 'CAROLE HUREL'), + ], + [ + (388407, 'Camille HUREL'), + (388408, 'Valentin HUREL'), + ], + ), + ], +) def test_search_from_email(mocked_get, app, connector, response1, adults, children): mocked_get.side_effect = [response1] result = connector.search_from_email('foo@dummy.org') - assert mocked_get.mock_calls == [mock.call( - 'https://dummy-server.org/afi/agent/rechercherParEmail/', - headers={'Accept': 'application/json'}, - params={'adresseMail': 'foo@dummy.org'})] + assert mocked_get.mock_calls == [ + mock.call( + 'https://dummy-server.org/afi/agent/rechercherParEmail/', + headers={'Accept': 'application/json'}, + params={'adresseMail': 'foo@dummy.org'}, + ) + ] assert result[0] == adults[0][0] # agent index assert [(x['id'], x['text']) for 
x in result[1]] == adults assert [(x['id'], x['text']) for x in result[2]] == children @@ -155,36 +181,54 @@ def test_search_from_email_error(mocked_get, app, connector): with pytest.raises(APIError) as exc: connector.search_from_email('foo@dummy.org') assert str(exc.value) == "L'adresse mail n'appartient à aucun agent" - assert mocked_get.mock_calls == [mock.call( - 'https://dummy-server.org/afi/agent/rechercherParEmail/', - headers={'Accept': 'application/json'}, - params={'adresseMail': 'foo@dummy.org'})] + assert mocked_get.mock_calls == [ + mock.call( + 'https://dummy-server.org/afi/agent/rechercherParEmail/', + headers={'Accept': 'application/json'}, + params={'adresseMail': 'foo@dummy.org'}, + ) + ] @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, family', [ - (RECHERCHE_PAR_EMAIL_1, [ - (389227, 'Jacques ROUSSEAU'), - (434729, 'Rina DI MARINO'), - (389229, 'Lola ROUSSEAU'), - (389230, 'Nicolas ROUSSEAU'), - (389231, 'Mélina ROUSSEAU'), - ]), - (RECHERCHE_PAR_EMAIL_2, [ - (388412, 'Louise PIED'), - (388413, 'KEVIN PIED'), - ]), - (RECHERCHE_PAR_EMAIL_3, [ - (388420, 'Marie-Noëlle BASDEVANT'), - (434728, 'PIETRO BARTOLOMEO'), - ]), - (RECHERCHE_PAR_EMAIL_4, [ - (388405, 'Jean-Christophe HUREL'), - (434727, 'CAROLE HUREL'), - (388407, 'Camille HUREL'), - (388408, 'Valentin HUREL'), - ]), -]) +@pytest.mark.parametrize( + 'response1, family', + [ + ( + RECHERCHE_PAR_EMAIL_1, + [ + (389227, 'Jacques ROUSSEAU'), + (434729, 'Rina DI MARINO'), + (389229, 'Lola ROUSSEAU'), + (389230, 'Nicolas ROUSSEAU'), + (389231, 'Mélina ROUSSEAU'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_2, + [ + (388412, 'Louise PIED'), + (388413, 'KEVIN PIED'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_3, + [ + (388420, 'Marie-Noëlle BASDEVANT'), + (434728, 'PIETRO BARTOLOMEO'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_4, + [ + (388405, 'Jean-Christophe HUREL'), + (434727, 'CAROLE HUREL'), + (388407, 'Camille HUREL'), + (388408, 'Valentin HUREL'), + ], + ), + ], +) def 
test_family(mocked_get, app, connector, response1, family): mocked_get.side_effect = [response1] endpoint = get_endpoint('family') + '?email=foo@dummy.org' @@ -203,12 +247,15 @@ def test_family_error(mocked_get, app, connector): @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, index, name', [ - (RECHERCHE_PAR_EMAIL_1, 389227, 'Jacques ROUSSEAU'), - (RECHERCHE_PAR_EMAIL_2, 388412, 'Louise PIED'), - (RECHERCHE_PAR_EMAIL_3, 388420, 'Marie-Noëlle BASDEVANT'), - (RECHERCHE_PAR_EMAIL_4, 388405, 'Jean-Christophe HUREL'), -]) +@pytest.mark.parametrize( + 'response1, index, name', + [ + (RECHERCHE_PAR_EMAIL_1, 389227, 'Jacques ROUSSEAU'), + (RECHERCHE_PAR_EMAIL_2, 388412, 'Louise PIED'), + (RECHERCHE_PAR_EMAIL_3, 388420, 'Marie-Noëlle BASDEVANT'), + (RECHERCHE_PAR_EMAIL_4, 388405, 'Jean-Christophe HUREL'), + ], +) def test_agent(mocked_get, app, connector, response1, index, name): mocked_get.side_effect = [response1] endpoint = get_endpoint('agent') + '?email=foo@dummy.org' @@ -220,23 +267,38 @@ def test_agent(mocked_get, app, connector, response1, index, name): @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, adults', [ - (RECHERCHE_PAR_EMAIL_1, [ - (389227, 'Jacques ROUSSEAU'), - (434729, 'Rina DI MARINO'), - ]), - (RECHERCHE_PAR_EMAIL_2, [ - (388412, 'Louise PIED'), - ]), - (RECHERCHE_PAR_EMAIL_3, [ - (388420, 'Marie-Noëlle BASDEVANT'), - (434728, 'PIETRO BARTOLOMEO'), - ]), - (RECHERCHE_PAR_EMAIL_4, [ - (388405, 'Jean-Christophe HUREL'), - (434727, 'CAROLE HUREL'), - ]), -]) +@pytest.mark.parametrize( + 'response1, adults', + [ + ( + RECHERCHE_PAR_EMAIL_1, + [ + (389227, 'Jacques ROUSSEAU'), + (434729, 'Rina DI MARINO'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_2, + [ + (388412, 'Louise PIED'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_3, + [ + (388420, 'Marie-Noëlle BASDEVANT'), + (434728, 'PIETRO BARTOLOMEO'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_4, + [ + (388405, 'Jean-Christophe HUREL'), + (434727, 'CAROLE 
HUREL'), + ], + ), + ], +) def test_adults(mocked_get, app, connector, response1, adults): mocked_get.side_effect = [response1] endpoint = get_endpoint('adults') + '?email=foo@dummy.org' @@ -246,22 +308,33 @@ def test_adults(mocked_get, app, connector, response1, adults): @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, children', [ - (RECHERCHE_PAR_EMAIL_1, [ - (389229, 'Lola ROUSSEAU'), - (389230, 'Nicolas ROUSSEAU'), - (389231, 'Mélina ROUSSEAU'), - ]), - (RECHERCHE_PAR_EMAIL_2, [ - (388413, 'KEVIN PIED'), - ]), - (RECHERCHE_PAR_EMAIL_3, [ - ]), - (RECHERCHE_PAR_EMAIL_4, [ - (388407, 'Camille HUREL'), - (388408, 'Valentin HUREL'), - ]), -]) +@pytest.mark.parametrize( + 'response1, children', + [ + ( + RECHERCHE_PAR_EMAIL_1, + [ + (389229, 'Lola ROUSSEAU'), + (389230, 'Nicolas ROUSSEAU'), + (389231, 'Mélina ROUSSEAU'), + ], + ), + ( + RECHERCHE_PAR_EMAIL_2, + [ + (388413, 'KEVIN PIED'), + ], + ), + (RECHERCHE_PAR_EMAIL_3, []), + ( + RECHERCHE_PAR_EMAIL_4, + [ + (388407, 'Camille HUREL'), + (388408, 'Valentin HUREL'), + ], + ), + ], +) def test_children(mocked_get, app, connector, response1, children): mocked_get.side_effect = [response1] endpoint = get_endpoint('children') + '?email=foo@dummy.org' @@ -271,10 +344,10 @@ def test_children(mocked_get, app, connector, response1, children): @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, response2, taxes', [ - (RECHERCHE_PAR_EMAIL_4, GET_IMPOSITION_PAR_AGENT_4, - [(2018, '2018: 15000'), (2019, '2019: 1000')]) -]) +@pytest.mark.parametrize( + 'response1, response2, taxes', + [(RECHERCHE_PAR_EMAIL_4, GET_IMPOSITION_PAR_AGENT_4, [(2018, '2018: 15000'), (2019, '2019: 1000')])], +) def test_taxes(mocked_get, app, connector, response1, response2, taxes): mocked_get.side_effect = [response1, response2] endpoint = get_endpoint('taxes') + '?email=foo@dummy.org' @@ -282,17 +355,21 @@ def test_taxes(mocked_get, app, connector, response1, response2, taxes): assert 
mocked_get.mock_calls[1] == mock.call( 'https://dummy-server.org/afi/budget/getImpositionsParAgent/', headers={'Accept': 'application/json'}, - params={'indexAgent': str(json.loads(response1.content)['indexAgent'])}) + params={'indexAgent': str(json.loads(response1.content)['indexAgent'])}, + ) assert not resp.json['err'] assert [(x['id'], x['text']) for x in resp.json['data']] == taxes @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, response2, tax', [ - (RECHERCHE_PAR_EMAIL_4, GET_IMPOSITION_4, 1000), - (RECHERCHE_PAR_EMAIL_4, GET_IMPOSITION_NONE, None), - (RECHERCHE_PAR_EMAIL_4, GET_IMPOSITION_204, None), -]) +@pytest.mark.parametrize( + 'response1, response2, tax', + [ + (RECHERCHE_PAR_EMAIL_4, GET_IMPOSITION_4, 1000), + (RECHERCHE_PAR_EMAIL_4, GET_IMPOSITION_NONE, None), + (RECHERCHE_PAR_EMAIL_4, GET_IMPOSITION_204, None), + ], +) def test_taxes_for_year(mocked_get, app, connector, response1, response2, tax): mocked_get.side_effect = [response1, response2] endpoint = get_endpoint('taxes') + '?email=foo@dummy.org&year=2019' @@ -300,9 +377,8 @@ def test_taxes_for_year(mocked_get, app, connector, response1, response2, tax): assert mocked_get.mock_calls[1] == mock.call( 'https://dummy-server.org/afi/budget/getImposition/', headers={'Accept': 'application/json'}, - params={ - 'indexAgent': str(json.loads(response1.content)['indexAgent']), - 'anneeImposition': '2019'}) + params={'indexAgent': str(json.loads(response1.content)['indexAgent']), 'anneeImposition': '2019'}, + ) assert not resp.json['err'] if tax: assert len(resp.json['data']) == 1 @@ -312,9 +388,7 @@ def test_taxes_for_year(mocked_get, app, connector, response1, response2, tax): @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, response2', [ - (RECHERCHE_PAR_EMAIL_NONE, None) -]) +@pytest.mark.parametrize('response1, response2', [(RECHERCHE_PAR_EMAIL_NONE, None)]) def test_taxes_error(mocked_get, app, connector, response1, response2): 
mocked_get.side_effect = [response1, response2] endpoint = get_endpoint('taxes') + '?email=foo@dummy.org' @@ -325,9 +399,12 @@ def test_taxes_error(mocked_get, app, connector, response1, response2): @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -@pytest.mark.parametrize('response1, response2', [ - (RECHERCHE_PAR_EMAIL_1, DECLARER_IMPOT_1), -]) +@pytest.mark.parametrize( + 'response1, response2', + [ + (RECHERCHE_PAR_EMAIL_1, DECLARER_IMPOT_1), + ], +) def test_declare_tax(mocked_post, mocked_get, app, connector, response1, response2): mocked_get.side_effect = [response1] mocked_post.side_effect = [response2] @@ -340,16 +417,19 @@ def test_declare_tax(mocked_post, mocked_get, app, connector, response1, respons 'montantImposition': '777.77', } resp = app.post_json(endpoint, params=payload) - assert mocked_post.mock_calls == [mock.call( - 'https://dummy-server.org/afi/budget/declarerImpot/', - headers={'Accept': 'application/json'}, - json={ - 'indexAgent': '389227', - 'indexImposition': '368', - 'anneeImposition': '2011', - 'nombrePartImposition': '3.2', - 'montantImposition': '777.77', - })] + assert mocked_post.mock_calls == [ + mock.call( + 'https://dummy-server.org/afi/budget/declarerImpot/', + headers={'Accept': 'application/json'}, + json={ + 'indexAgent': '389227', + 'indexImposition': '368', + 'anneeImposition': '2011', + 'nombrePartImposition': '3.2', + 'montantImposition': '777.77', + }, + ) + ] data = json.loads(response2.content) data.pop('err') data.pop('err_desc') @@ -358,10 +438,13 @@ def test_declare_tax(mocked_post, mocked_get, app, connector, response1, respons @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -@pytest.mark.parametrize('response1, response2', [ - (RECHERCHE_PAR_EMAIL_NONE, None), - (RECHERCHE_PAR_EMAIL_4, DECLARER_IMPOT_500), -]) +@pytest.mark.parametrize( + 'response1, response2', + [ + (RECHERCHE_PAR_EMAIL_NONE, None), + (RECHERCHE_PAR_EMAIL_4, 
DECLARER_IMPOT_500), + ], +) def test_declare_tax_error(mocked_post, mocked_get, app, connector, response1, response2): mocked_get.side_effect = [response1] mocked_post.side_effect = [response2] @@ -382,17 +465,23 @@ def test_declare_tax_error(mocked_post, mocked_get, app, connector, response1, r @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, ratio', [ - (CALCULER, 52.33), -]) +@pytest.mark.parametrize( + 'response1, ratio', + [ + (CALCULER, 52.33), + ], +) def test_simulate_quotient(mocked_get, app, connector, response1, ratio): mocked_get.side_effect = [response1] endpoint = get_endpoint('simulate-quotient') + '?code=2&nb_parts=2.2&amount=222.22' resp = app.get(endpoint) - assert mocked_get.mock_calls == [mock.call( - 'https://dummy-server.org/afi/budget/calculer/', - headers={'Accept': 'application/json'}, - params={'codeCalcul': '2', 'nbrPartImposition': '2.2', 'mntImposition': '222.22'})] + assert mocked_get.mock_calls == [ + mock.call( + 'https://dummy-server.org/afi/budget/calculer/', + headers={'Accept': 'application/json'}, + params={'codeCalcul': '2', 'nbrPartImposition': '2.2', 'mntImposition': '222.22'}, + ) + ] data = json.loads(response1.content) data.pop('err') data.pop('err_desc') @@ -410,17 +499,25 @@ def test_simulate_quotient_error(mocked_get, app, connector): @mock.patch('passerelle.utils.Request.get') -@pytest.mark.parametrize('response1, response2, helps', [ - (RECHERCHE_PAR_EMAIL_4, GET_AIDES_PAR_AGENT_1, []), - (RECHERCHE_PAR_EMAIL_4, GET_AIDES_PAR_AGENT_4, [ - (37145, '2020-05-26 (En attente)'), - (37146, '2020-05-26 (En attente)'), - (37149, '2020-06-11 (En attente)'), - (37152, '2020-09-29 (En attente)'), - (37153, '2020-09-29 (En attente)'), - (37154, '2020-09-29 (En attente)'), - (37155, '2020-09-29 (En attente)')]), -]) +@pytest.mark.parametrize( + 'response1, response2, helps', + [ + (RECHERCHE_PAR_EMAIL_4, GET_AIDES_PAR_AGENT_1, []), + ( + RECHERCHE_PAR_EMAIL_4, + GET_AIDES_PAR_AGENT_4, + [ + 
(37145, '2020-05-26 (En attente)'), + (37146, '2020-05-26 (En attente)'), + (37149, '2020-06-11 (En attente)'), + (37152, '2020-09-29 (En attente)'), + (37153, '2020-09-29 (En attente)'), + (37154, '2020-09-29 (En attente)'), + (37155, '2020-09-29 (En attente)'), + ], + ), + ], +) def test_helps(mocked_get, app, connector, response1, response2, helps): mocked_get.side_effect = [response1, response2] endpoint = get_endpoint('helps') + '?email=foo@dummy.org' @@ -428,16 +525,20 @@ def test_helps(mocked_get, app, connector, response1, response2, helps): assert mocked_get.mock_calls[1] == mock.call( 'https://dummy-server.org/afi/aide/getAidesParAgent/', headers={'Accept': 'application/json'}, - params={'indexAgent': str(json.loads(response1.content)['indexAgent'])}) + params={'indexAgent': str(json.loads(response1.content)['indexAgent'])}, + ) assert not resp.json['err'] assert [(x['id'], x['text']) for x in resp.json['data']] == helps @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -@pytest.mark.parametrize('response1, response2', [ - (RECHERCHE_PAR_EMAIL_4, DEPOSER_4), -]) +@pytest.mark.parametrize( + 'response1, response2', + [ + (RECHERCHE_PAR_EMAIL_4, DEPOSER_4), + ], +) def test_demand_help(mocked_post, mocked_get, app, connector, response1, response2): mocked_get.side_effect = [response1] mocked_post.side_effect = [response2] @@ -453,18 +554,21 @@ def test_demand_help(mocked_post, mocked_get, app, connector, response1, respons 'montantFacture': '2222.22', } resp = app.post_json(endpoint, params=payload) - assert mocked_post.mock_calls == [mock.call( - 'https://dummy-server.org/afi/aide/deposer/', - headers={'Accept': 'application/json'}, - json={ - 'indexAgent': '388405', - 'codeTypeAide': '24', - 'natureTypeAide': 'A', - 'individusConcernes': [{'indexIndividu': '388407'}, {'indexIndividu': '388408'}], - 'dateDebut': '2020-07-15', - 'dateFin': '2020-07-31', - 'montantFacture': '2222.22' - })] + assert 
mocked_post.mock_calls == [ + mock.call( + 'https://dummy-server.org/afi/aide/deposer/', + headers={'Accept': 'application/json'}, + json={ + 'indexAgent': '388405', + 'codeTypeAide': '24', + 'natureTypeAide': 'A', + 'individusConcernes': [{'indexIndividu': '388407'}, {'indexIndividu': '388408'}], + 'dateDebut': '2020-07-15', + 'dateFin': '2020-07-31', + 'montantFacture': '2222.22', + }, + ) + ] data = json.loads(response2.content) data.pop('err') data.pop('err_desc') @@ -473,9 +577,12 @@ def test_demand_help(mocked_post, mocked_get, app, connector, response1, respons @mock.patch('passerelle.utils.Request.get') @mock.patch('passerelle.utils.Request.post') -@pytest.mark.parametrize('response1, response2', [ - (RECHERCHE_PAR_EMAIL_NONE, None), -]) +@pytest.mark.parametrize( + 'response1, response2', + [ + (RECHERCHE_PAR_EMAIL_NONE, None), + ], +) def test_demand_help_error(mocked_post, mocked_get, app, connector, response1, response2): mocked_get.side_effect = [response1] mocked_post.side_effect = [response2] diff --git a/tests/test_solis_apa.py b/tests/test_solis_apa.py index 4ad63f15..fed1a565 100644 --- a/tests/test_solis_apa.py +++ b/tests/test_solis_apa.py @@ -24,26 +24,29 @@ def json_get_data(filename): @pytest.fixture def setup(db): - api = ApiUser.objects.create(username='all', - keytype='', key='') - solis = SolisAPA.objects.create(base_url='https://whateever.com/rec/', - slug='test') + api = ApiUser.objects.create(username='all', keytype='', key='') + solis = SolisAPA.objects.create(base_url='https://whateever.com/rec/', slug='test') obj_type = ContentType.objects.get_for_model(solis) - AccessRight.objects.create(codename='can_access', apiuser=api, - resource_type=obj_type, resource_pk=solis.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=solis.pk + ) @pytest.fixture def url(): def get_url(name): return reverse('solis-apa-%s' % name, kwargs={'slug': 'test'}) + return get_url 
-@pytest.fixture(params=[ - json_get_data('premiere_demande_apa_domicile.json'), - json_get_data('premiere_demande_apa_etablissement.json'), - json_get_data('premiere_demande_apa_etablissement_papier.json')]) +@pytest.fixture( + params=[ + json_get_data('premiere_demande_apa_domicile.json'), + json_get_data('premiere_demande_apa_etablissement.json'), + json_get_data('premiere_demande_apa_etablissement_papier.json'), + ] +) def formdata(request): return request.param @@ -61,8 +64,9 @@ def test_suivi_error(mocked_post, setup, app): def test_instegration_demande_apa_domicile(mocked_post, setup, app, url): fake_response = '{"ImportIdResults":{"Items":[{"key":"indexDossier","value":359043},{"key":"indexBeneficiaire","value":458238},{"key":"indexDemande","value":221155}]}}' - mocked_post.return_value = mock.Mock(status_code=200, content=fake_response, - json=lambda: json.loads(fake_response)) + mocked_post.return_value = mock.Mock( + status_code=200, content=fake_response, json=lambda: json.loads(fake_response) + ) params = json_get_data('premiere_demande_apa_domicile.json') resp = app.post_json(url('integration'), params=params, status=200) @@ -81,11 +85,13 @@ def test_instegration_demande_apa_domicile(mocked_post, setup, app, url): def test_integration_demande_apa_etablissement(mocked_post, setup, app, url): fake_response = '{"ImportIdResults":{"Items":[{"key":"indexDossier","value":359043},{"key":"indexBeneficiaire","value":458238},{"key":"indexDemande","value":221155}]}}' - mocked_post.return_value = mock.Mock(status_code=200, content=fake_response, - json=lambda: json.loads(fake_response)) + mocked_post.return_value = mock.Mock( + status_code=200, content=fake_response, json=lambda: json.loads(fake_response) + ) - resp = app.post_json(url('integration'), - params=json_get_data('premiere_demande_apa_etablissement.json'), status=200) + resp = app.post_json( + url('integration'), params=json_get_data('premiere_demande_apa_etablissement.json'), status=200 + ) 
resp.json['data']['indexDossier'] == 359043 resp.json['data']['indexBeneficiaire'] == 458238 diff --git a/tests/test_strasbourg_eu.py b/tests/test_strasbourg_eu.py index 66d4432a..8eba3528 100644 --- a/tests/test_strasbourg_eu.py +++ b/tests/test_strasbourg_eu.py @@ -146,9 +146,11 @@ def notification_add_success_mock(url, request): def notification_add_error_mock(url, request): return {'content': ERROR_EXAMPLE, 'request': request, 'status_code': 200} + def unauthorized_mock(url, request): return {'content': UNAUTHORIZED_EXAMPLE, 'request': request, 'status_code': 200} + def favorites_mock(url, request): if url.path.endswith('/get-user-favorites'): return {'content': FAVORITES_EXAMPLE, 'request': request, 'status_code': 200} @@ -167,8 +169,9 @@ def strasbourg_eu(db): connector = StrasbourgEu.objects.create(slug='foobar', liferay_api_url='http://example.net/api/') api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(connector) - AccessRight.objects.create(codename='can_access', apiuser=api, - resource_type=obj_type, resource_pk=connector.pk) + AccessRight.objects.create( + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=connector.pk + ) return connector @@ -209,13 +212,19 @@ def test_notifications(app, strasbourg_eu, caplog): with HTTMock(notifications_mock): resp = app.get(endpoint, status=400) - records = [record for record in caplog.records if - record.msg.startswith('received invalid publicationDate for notification')] + records = [ + record + for record in caplog.records + if record.msg.startswith('received invalid publicationDate for notification') + ] assert len(records) == 0 resp = app.get(endpoint + '?name_id=xxx') assert len(resp.json['notifications']) == 2 - records = [record for record in caplog.records if - record.msg.startswith('received invalid publicationDate for notification')] + records = [ + record + for record in caplog.records + if record.msg.startswith('received 
invalid publicationDate for notification') + ] assert len(records) == 1 with HTTMock(notification_add_success_mock): diff --git a/tests/test_tcl.py b/tests/test_tcl.py index 050f4190..1bda8041 100644 --- a/tests/test_tcl.py +++ b/tests/test_tcl.py @@ -9,113 +9,107 @@ from passerelle.contrib.tcl.models import Tcl, Line, Stop import utils LIGNE_BUS = { - "values" : [ - { - "indice" : "", - "last_update_fme" : "2017-06-27 06:01:10", - "infos" : "", - "couleur" : "164 203 38", - "libelle" : "Croix Rousse - Plateaux de St Rambert", - "last_update" : "None", - "code_titan" : "2Aa1", - "gid" : "1003", - "ligne" : "2", - "ut" : "UTV", - "sens" : "Aller" - }, - ] + "values": [ + { + "indice": "", + "last_update_fme": "2017-06-27 06:01:10", + "infos": "", + "couleur": "164 203 38", + "libelle": "Croix Rousse - Plateaux de St Rambert", + "last_update": "None", + "code_titan": "2Aa1", + "gid": "1003", + "ligne": "2", + "ut": "UTV", + "sens": "Aller", + }, + ] } LIGNE_TRAM = { - "values" : [ - { - "ligne" : "T2", - "sens" : "Retour", - "libelle" : "St Priest Bel Air - Perrache", - "indice" : "", - "ut" : "UTT", - "couleur" : "54 0 160", - "infos" : "", - "gid" : "4", - "last_update_fme" : "2017-06-27 06:01:10", - "code_titan" : "T2r2", - "last_update" : "None" - }, - ] + "values": [ + { + "ligne": "T2", + "sens": "Retour", + "libelle": "St Priest Bel Air - Perrache", + "indice": "", + "ut": "UTT", + "couleur": "54 0 160", + "infos": "", + "gid": "4", + "last_update_fme": "2017-06-27 06:01:10", + "code_titan": "T2r2", + "last_update": "None", + }, + ] } LIGNE_MF = { - "values" : [ - { - "last_update_fme" : "2017-06-27 06:01:10", - "ligne" : "B", - "ut" : "UTMA", - "indice" : "", - "couleur" : "0 170 227", - "libelle" : "Charpennes - Oullins Gare", - "last_update" : "None", - "gid" : "17", - "code_titan" : "302Aa1", - "infos" : "", - "sens" : "Aller" - }, - ] + "values": [ + { + "last_update_fme": "2017-06-27 06:01:10", + "ligne": "B", + "ut": "UTMA", + "indice": "", + "couleur": 
"0 170 227", + "libelle": "Charpennes - Oullins Gare", + "last_update": "None", + "gid": "17", + "code_titan": "302Aa1", + "infos": "", + "sens": "Aller", + }, + ] } ARRETS = { - "type" : "FeatureCollection", - "features" : [ - { - "geometry" : { - "coordinates" : [ - 4.84756760746877, - 45.7594333137236 - ], - "type" : "Point" - }, - "type" : "Feature", - "properties" : { - "pmr" : "t", - "id" : "46026", - "nom" : "Place Guichard", - "last_update_fme" : "2017-07-10 06:00:28", - "last_update" : "", - "gid" : "92", - "desserte" : "302A:R", - "ascenseur" : "f", - "escalator" : "f" - } - }, - ] + "type": "FeatureCollection", + "features": [ + { + "geometry": {"coordinates": [4.84756760746877, 45.7594333137236], "type": "Point"}, + "type": "Feature", + "properties": { + "pmr": "t", + "id": "46026", + "nom": "Place Guichard", + "last_update_fme": "2017-07-10 06:00:28", + "last_update": "", + "gid": "92", + "desserte": "302A:R", + "ascenseur": "f", + "escalator": "f", + }, + }, + ], } PASSAGES = { - "values" : [ - { - "last_update_fme" : "2017-06-27 07:32:27", - "coursetheorique" : "302A-019AT:53:1:14", - "ligne" : "302A", - "direction" : "Gare d'Oullins", - "gid" : "12429", - "idtarretdestination" : "46035", - "heurepassage" : "2017-06-27 07:33:50", - "delaipassage" : "1 min", - "id" : "46026", - "type" : "E" - }, - { - "gid" : "12430", - "direction" : "Gare d'Oullins", - "idtarretdestination" : "46035", - "ligne" : "302A", - "last_update_fme" : "2017-06-27 07:32:27", - "coursetheorique" : "302A-019AT:61:1:3", - "id" : "46026", - "delaipassage" : "4 min", - "type" : "E", - "heurepassage" : "2017-06-27 07:36:55" - }, - ] + "values": [ + { + "last_update_fme": "2017-06-27 07:32:27", + "coursetheorique": "302A-019AT:53:1:14", + "ligne": "302A", + "direction": "Gare d'Oullins", + "gid": "12429", + "idtarretdestination": "46035", + "heurepassage": "2017-06-27 07:33:50", + "delaipassage": "1 min", + "id": "46026", + "type": "E", + }, + { + "gid": "12430", + "direction": "Gare 
d'Oullins", + "idtarretdestination": "46035", + "ligne": "302A", + "last_update_fme": "2017-06-27 07:32:27", + "coursetheorique": "302A-019AT:61:1:3", + "id": "46026", + "delaipassage": "4 min", + "type": "E", + "heurepassage": "2017-06-27 07:36:55", + }, + ] } @@ -123,6 +117,7 @@ PASSAGES = { def connector(db): return utils.setup_access_rights(Tcl.objects.create(slug='test')) + def tcl_responses(url, **kwargs): content = { '/tcllignebus': LIGNE_BUS, @@ -133,6 +128,7 @@ def tcl_responses(url, **kwargs): }.get(urlparse.urlparse(url).path) return utils.FakedResponse(json=lambda: copy.deepcopy(content), status_code=200) + @mock.patch('passerelle.utils.Request.get') def test_cron(mocked_get, app, connector): mocked_get.side_effect = tcl_responses @@ -164,6 +160,7 @@ def test_stop_info(mocked_get, app, connector): assert resp.json['data']['passings_by_line'][0]['ligne'] == 'B' assert len(resp.json['data']['passings_by_line'][0]['passings']) == 2 + @mock.patch('passerelle.utils.Request.get') def test_availability(mocked_get, app, connector): mocked_get.side_effect = tcl_responses diff --git a/tests/test_teamnet_axel.py b/tests/test_teamnet_axel.py index d0cd75c7..dccb1900 100644 --- a/tests/test_teamnet_axel.py +++ b/tests/test_teamnet_axel.py @@ -12,11 +12,21 @@ from passerelle.utils.jsonresponse import APIError @pytest.fixture def setup(db): return utils.make_resource( - TeamnetAxel, **{ - 'slug': 'test', 'wsdl_url': 'http://example.net/AXEL_WS/AxelWS.php?wsdl', + TeamnetAxel, + **{ + 'slug': 'test', + 'wsdl_url': 'http://example.net/AXEL_WS/AxelWS.php?wsdl', 'billing_regies': { - "11": "EN2-CLASSE", "27": "EN10-FM", "37": "EN3-DONS", "31": "EN31-C.V.", - "42": "EN29-RESTC", "43": "EN32-ENFAN", "38": "EN30-PRODD"}}) + "11": "EN2-CLASSE", + "27": "EN10-FM", + "37": "EN3-DONS", + "31": "EN31-C.V.", + "42": "EN29-RESTC", + "43": "EN32-ENFAN", + "38": "EN30-PRODD", + }, + }, + ) AUTH_RESPONSE = ''' @@ -183,20 +193,20 @@ def test_endpoint_ping(soap_client, app, setup): 
assert resp.json['data']['ping'] == 'pong' -@mock.patch('passerelle.contrib.teamnet_axel.soap.get_client', - side_effect=ConnectionError('Mocked Request ConnectionError')) +@mock.patch( + 'passerelle.contrib.teamnet_axel.soap.get_client', + side_effect=ConnectionError('Mocked Request ConnectionError'), +) def test_endpoint_ping_with_connection_error(soap_client, app, setup): resp = app.get('/teamnet-axel/test/ping/') assert resp.json['err_desc'] == 'Client Error: Mocked Request ConnectionError' def get_client(authenticated=True): - return mock.Mock( - service=MockedService(authenticated=authenticated)) + return mock.Mock(service=MockedService(authenticated=authenticated)) class MockedService(mock.Mock): - def getData(self, streamId, xmlParams, user): if streamId == 'ConnexionCompteFamille': return AUTH_RESPONSE.format(str(self.authenticated).lower()) diff --git a/tests/test_templatetags.py b/tests/test_templatetags.py index c31bf433..63f5093a 100644 --- a/tests/test_templatetags.py +++ b/tests/test_templatetags.py @@ -36,7 +36,9 @@ def test_render_body_schemas(db): for name, method in inspect.getmembers(connector_model, predicate): if not hasattr(method, 'endpoint_info'): continue - if method.endpoint_info.post and method.endpoint_info.post.get('request_body', {}).get('schema'): + if method.endpoint_info.post and method.endpoint_info.post.get('request_body', {}).get( + 'schema' + ): yield method.endpoint_info.post['request_body']['schema'], method schemas = list(collect_schemas()) @@ -55,7 +57,7 @@ def test_render_json_schema(): 'foo': { 'type': 'string', }, - } + }, } # Check that no unicode crash occurs with translation.override('fr'): @@ -63,4 +65,7 @@ def test_render_json_schema(): def test_render_enum_schema(): - assert str(render_json_schema({'enum': [1, "aaa", [1]]})) == '1 | "aaa" | [1]' + assert ( + str(render_json_schema({'enum': [1, "aaa", [1]]})) + == '1 | "aaa" | [1]' + ) diff --git a/tests/test_toulouse_axel.py b/tests/test_toulouse_axel.py index 
09259efe..317d3d37 100644 --- a/tests/test_toulouse_axel.py +++ b/tests/test_toulouse_axel.py @@ -56,9 +56,8 @@ import utils @pytest.fixture def resource(db): return utils.make_resource( - ToulouseAxel, - slug='test', - wsdl_url='http://example.net/AXEL_WS/AxelWS.php?wsdl') + ToulouseAxel, slug='test', wsdl_url='http://example.net/AXEL_WS/AxelWS.php?wsdl' + ) @pytest.fixture @@ -84,7 +83,8 @@ def family_data(): filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/family_info.xml') with open(filepath) as xml: content = xml.read() - resp = ''' + resp = ( + ''' @@ -97,7 +97,9 @@ def family_data(): %s - '''.strip() % content + '''.strip() + % content + ) return schemas.ref_famille_dui.response_converter.decode(ET.fromstring(resp))['DATA']['PORTAIL']['DUI'] @@ -106,7 +108,8 @@ def child_activities_data(): filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/child_activities.xml') with open(filepath) as xml: content = xml.read() - resp = ''' + resp = ( + ''' @@ -119,7 +122,9 @@ def child_activities_data(): %s - '''.strip() % content + '''.strip() + % content + ) return schemas.enfants_activites.response_converter.decode(ET.fromstring(resp))['DATA']['PORTAIL']['DUI'] @@ -136,8 +141,18 @@ def booking_params(): return { 'booking_start_date': '2020-04-13', 'booking_end_date': '2020-04-17', - 'booking_list_MAT': ['3535:MAT:A19P1M1:2020-04-13', '3535:MAT:A19P1M1:2020-04-14', '3535:MAT:A19P1M1:2020-04-16', '3535:MAT:A19P1M1:2020-04-17'], - 'booking_list_MIDI': ['3535:MIDI:A19P1M2:2020-04-13', '3535:MIDI:A19P1M2:2020-04-14', '3535:MIDI:A19P1M2:2020-04-16', '3535:MIDI:A19P1M2:2020-04-17'], + 'booking_list_MAT': [ + '3535:MAT:A19P1M1:2020-04-13', + '3535:MAT:A19P1M1:2020-04-14', + '3535:MAT:A19P1M1:2020-04-16', + '3535:MAT:A19P1M1:2020-04-17', + ], + 'booking_list_MIDI': [ + '3535:MIDI:A19P1M2:2020-04-13', + '3535:MIDI:A19P1M2:2020-04-14', + '3535:MIDI:A19P1M2:2020-04-16', + '3535:MIDI:A19P1M2:2020-04-17', + ], 'booking_list_SOIR': 
['3535:SOIR:A19P1M3:2020-04-13'], 'booking_list_GARD': ['3535:GARD:A19P1M4:2020-04-15'], 'child_id': '3535', @@ -148,8 +163,18 @@ def booking_params(): @pytest.fixture def annual_booking_params(): return { - 'booking_list_MAT': ['3535:MAT:A19P1M1:monday', '3535:MAT:A19P1M1:tuesday', '3535:MAT:A19P1M1:thursday', '3535:MAT:A19P1M1:friday'], - 'booking_list_MIDI': ['3535:MIDI:A19P1M2:monday', '3535:MIDI:A19P1M2:tuesday', '3535:MIDI:A19P1M2:thursday', '3535:MIDI:A19P1M2:friday'], + 'booking_list_MAT': [ + '3535:MAT:A19P1M1:monday', + '3535:MAT:A19P1M1:tuesday', + '3535:MAT:A19P1M1:thursday', + '3535:MAT:A19P1M1:friday', + ], + 'booking_list_MIDI': [ + '3535:MIDI:A19P1M2:monday', + '3535:MIDI:A19P1M2:tuesday', + '3535:MIDI:A19P1M2:thursday', + '3535:MIDI:A19P1M2:friday', + ], 'booking_list_SOIR': ['3535:SOIR:A19P1M3:monday'], 'booking_list_GARD': ['3535:GARD:A19P1M4:wednesday'], 'child_id': '3535', @@ -265,17 +290,20 @@ def test_operation_status_error(resource): with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.soap_client') as client: client.return_value.service.getData.return_value = resp with pytest.raises(AxelError, match='Foo reason'): - schemas.ref_verif_dui(resource, { - 'PORTAIL': { - 'DUI': { - 'IDDUI': 'XXX', - 'IDPERSONNE': '42', - 'PRENOM': 'John', - 'NOM': 'Doe', - 'NAISSANCE': '2010-10-10', + schemas.ref_verif_dui( + resource, + { + 'PORTAIL': { + 'DUI': { + 'IDDUI': 'XXX', + 'IDPERSONNE': '42', + 'PRENOM': 'John', + 'NOM': 'Doe', + 'NAISSANCE': '2010-10-10', + } } - } - }) + }, + ) @contextmanager @@ -294,160 +322,212 @@ def mock_getdata(content, operation): %s - '''.strip() % (operation, content) + '''.strip() % ( + operation, + content, + ) client.return_value.service.getData.return_value = resp yield -@pytest.mark.parametrize('content', [ - '', -]) +@pytest.mark.parametrize( + 'content', + [ + '', + ], +) def test_operation_ref_date_gestion_dui(resource, content): with mock_getdata(content, 'RefDateGestionDui'): with 
pytest.raises(AxelError): schemas.ref_date_gestion_dui(resource) -@pytest.mark.parametrize('content', [ - '', - 'foo', - '42', -]) +@pytest.mark.parametrize( + 'content', + [ + '', + 'foo', + '42', + ], +) def test_operation_ref_verif_dui(resource, content): with mock_getdata(content, 'RefVerifDui'): with pytest.raises(AxelError): - schemas.ref_verif_dui(resource, { - 'PORTAIL': { - 'DUI': { - 'PRENOM': 'John', - 'NOM': 'Doe', - 'NAISSANCE': '2010-10-10', + schemas.ref_verif_dui( + resource, + { + 'PORTAIL': { + 'DUI': { + 'PRENOM': 'John', + 'NOM': 'Doe', + 'NAISSANCE': '2010-10-10', + } } - } - }) + }, + ) -@pytest.mark.parametrize('content', [ - '', -]) +@pytest.mark.parametrize( + 'content', + [ + '', + ], +) def test_operation_ref_famille_dui(resource, content): with mock_getdata(content, 'RefFamilleDui'): with pytest.raises(AxelError): - schemas.ref_famille_dui(resource, { - 'PORTAIL': { - 'DUI': { - 'IDDUI': 'XXX', + schemas.ref_famille_dui( + resource, + { + 'PORTAIL': { + 'DUI': { + 'IDDUI': 'XXX', + } } - } - }) + }, + ) -@pytest.mark.parametrize('content', [ - '', -]) +@pytest.mark.parametrize( + 'content', + [ + '', + ], +) def test_operation_form_maj_famille_dui(resource, content): with mock_getdata(content, 'FormMajFamilleDui'): with pytest.raises(AxelError): - schemas.form_maj_famille_dui(resource, { - 'PORTAIL': { - 'DUI': { - 'IDDUI': 'XXX', + schemas.form_maj_famille_dui( + resource, + { + 'PORTAIL': { + 'DUI': { + 'IDDUI': 'XXX', + } } - } - }) + }, + ) -@pytest.mark.parametrize('content', [ - '', -]) +@pytest.mark.parametrize( + 'content', + [ + '', + ], +) def test_operation_ref_facture_a_payer(resource, content): with mock_getdata(content, 'RefFactureAPayer'): with pytest.raises(AxelError): - schemas.ref_facture_a_payer(resource, { - 'PORTAIL': { - 'DUI': { - 'IDDUI': 'XXX', + schemas.ref_facture_a_payer( + resource, + { + 'PORTAIL': { + 'DUI': { + 'IDDUI': 'XXX', + } } - } - }) + }, + ) -@pytest.mark.parametrize('content', [ - '', -]) 
+@pytest.mark.parametrize( + 'content', + [ + '', + ], +) def test_operation_list_dui_factures(resource, content): with mock_getdata(content, 'ListeDuiFacturesPayeesRecettees'): with pytest.raises(AxelError): - schemas.list_dui_factures(resource, { - 'LISTFACTURE': { - 'NUMDUI': 'XXX', - 'DEBUT': '1970-01-01' - } - }) + schemas.list_dui_factures(resource, {'LISTFACTURE': {'NUMDUI': 'XXX', 'DEBUT': '1970-01-01'}}) -@pytest.mark.parametrize('content', [ - "", -]) +@pytest.mark.parametrize( + 'content', + [ + "", + ], +) def test_operation_ref_facture_pdf(resource, content): with mock_getdata(content, 'RefFacturePDF'): with pytest.raises(AxelError): - schemas.ref_facture_pdf(resource, { - 'PORTAIL': { - 'FACTUREPDF': { - 'IDFACTURE': 42, + schemas.ref_facture_pdf( + resource, + { + 'PORTAIL': { + 'FACTUREPDF': { + 'IDFACTURE': 42, + } } - } - }) + }, + ) -@pytest.mark.parametrize('content', [ - '', -]) +@pytest.mark.parametrize( + 'content', + [ + '', + ], +) def test_operation_form_paiement_dui(resource, content): with mock_getdata(content, 'FormPaiementDui'): with pytest.raises(AxelError): - schemas.form_paiement_dui(resource, { - 'PORTAIL': { - 'DUI': { - 'IDFACTURE': '42', - 'IDREGIEENCAISSEMENT': '', - 'MONTANTPAYE': '42.42', - 'DATEPAIEMENT': '01/01/2020 12:12:12', - 'REFERENCE': '42', + schemas.form_paiement_dui( + resource, + { + 'PORTAIL': { + 'DUI': { + 'IDFACTURE': '42', + 'IDREGIEENCAISSEMENT': '', + 'MONTANTPAYE': '42.42', + 'DATEPAIEMENT': '01/01/2020 12:12:12', + 'REFERENCE': '42', + } } - } - }) + }, + ) -@pytest.mark.parametrize('content', [ - '', -]) +@pytest.mark.parametrize( + 'content', + [ + '', + ], +) def test_operation_enfants_activites(resource, content): with mock_getdata(content, 'EnfantsActivites'): with pytest.raises(AxelError): - schemas.enfants_activites(resource, { - 'DUI': { - 'IDDUI': 'XXX', - 'ANNEEREFERENCE': '2042', - 'TYPESACTIVITES': 'MAT,MIDI,SOIR,GARD', - } - }) + schemas.enfants_activites( + resource, + { + 'DUI': { + 
'IDDUI': 'XXX', + 'ANNEEREFERENCE': '2042', + 'TYPESACTIVITES': 'MAT,MIDI,SOIR,GARD', + } + }, + ) -@pytest.mark.parametrize('content', [ - '', -]) +@pytest.mark.parametrize( + 'content', + [ + '', + ], +) def test_operation_reservation_periode(resource, content): with mock_getdata(content, 'ReservationPeriode'): with pytest.raises(AxelError): - schemas.reservation_periode(resource, { - 'PORTAIL': { - 'DUI': { - 'IDDUI': 'XXX', + schemas.reservation_periode( + resource, + { + 'PORTAIL': { + 'DUI': { + 'IDDUI': 'XXX', + } } - } - }) + }, + ) def test_management_dates_endpoint_axel_error(app, resource): @@ -471,29 +551,35 @@ def test_management_dates_endpoint(app, resource): resp = app.get('/toulouse-axel/test/management_dates') assert set(resp.json.keys()) == set(['err', 'data']) assert resp.json['err'] == 0 - assert set(resp.json['data'].keys()) == set([ - 'REPORT-REVENUS', - 'report_revenus', - 'EXTRACTION-FAMILLES', - 'extraction_familles', - 'EXTRACTION-CAFPRO', - 'extraction_cafpro' - ]) + assert set(resp.json['data'].keys()) == set( + [ + 'REPORT-REVENUS', + 'report_revenus', + 'EXTRACTION-FAMILLES', + 'extraction_familles', + 'EXTRACTION-CAFPRO', + 'extraction_cafpro', + ] + ) # again - data are in cache resp = app.get('/toulouse-axel/test/management_dates') assert set(resp.json.keys()) == set(['err', 'data']) assert resp.json['err'] == 0 - assert set(resp.json['data'].keys()) == set([ - 'REPORT-REVENUS', - 'report_revenus', - 'EXTRACTION-FAMILLES', - 'extraction_familles', - 'EXTRACTION-CAFPRO', - 'extraction_cafpro' - ]) + assert set(resp.json['data'].keys()) == set( + [ + 'REPORT-REVENUS', + 'report_revenus', + 'EXTRACTION-FAMILLES', + 'extraction_familles', + 'EXTRACTION-CAFPRO', + 'extraction_cafpro', + ] + ) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates') as management_dates: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates' + ) as management_dates: 
management_dates.return_value = {'foo': 'bar'} resp = app.get('/toulouse-axel/test/management_dates') assert set(resp.json.keys()) == set(['err', 'data']) @@ -574,7 +660,8 @@ def test_link_endpoint_axel_error(app, resource, link_params): 2 """ - xml_response = """ + xml_response = ( + """ RefVerifDui OK @@ -584,7 +671,9 @@ def test_link_endpoint_axel_error(app, resource, link_params): %s -""" % content +""" + % content + ) with mock_getdata(content, 'RefVerifDui'): with mock.patch('passerelle.contrib.toulouse_axel.schemas.AxelSchema.decode') as decode: decode.side_effect = xmlschema.XMLSchemaValidationError(None, None) @@ -600,17 +689,23 @@ def test_link_endpoint_axel_error(app, resource, link_params): assert resp.json['err_desc'] == "SOAP service is down" -@pytest.mark.parametrize('xml_response', [ - 'XXX0', - 'XXX421', - 'XXX424', -]) +@pytest.mark.parametrize( + 'xml_response', + [ + 'XXX0', + 'XXX421', + 'XXX424', + ], +) def test_link_endpoint_no_result(app, resource, link_params, xml_response): - content = ''' + content = ( + ''' %s -''' % xml_response +''' + % xml_response + ) with mock_getdata(content, 'RefVerifDui'): resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) assert resp.json['err_desc'] == "Person not found" @@ -644,13 +739,16 @@ def test_link_endpoint_conflict(app, resource, link_params): @pytest.mark.parametrize('code', [2, 3]) def test_link_endpoint(app, resource, link_params, code): - content = ''' + content = ( + ''' XXX 42 %s -''' % code +''' + % code + ) with mock_getdata(content, 'RefVerifDui'): resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) assert set(resp.json.keys()) == set(['err', 'link', 'created', 'dui', 'data']) @@ -723,19 +821,27 @@ def test_active_dui_endpoint_wrong_rl(app, resource): assert resp.json['err'] == 'no-rl' -@pytest.mark.parametrize('xml_response,code', [ - ('XXX0', 0), - ('XXX421', 1), - ('XXX424', 4), -]) +@pytest.mark.parametrize( + 'xml_response,code', + 
[ + ('XXX0', 0), + ('XXX421', 1), + ('XXX424', 4), + ], +) def test_active_dui_endpoint_wrong_dui_code(app, resource, family_data, xml_response, code): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - content = ''' + content = ( + ''' %s -''' % xml_response - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): +''' + % xml_response + ) + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): with mock_getdata(content, 'RefVerifDui'): resp = app.get('/toulouse-axel/test/active_dui?NameID=yyy') assert resp.json['err_desc'] == "Wrong DUI status" @@ -745,32 +851,39 @@ def test_active_dui_endpoint_wrong_dui_code(app, resource, family_data, xml_resp @pytest.mark.parametrize('code', [2, 3]) def test_active_dui_endpoint(app, resource, family_data, code): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - content = ''' + content = ( + ''' XXX 42 %s -''' % code - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): +''' + % code + ) + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): with mock_getdata(content, 'RefVerifDui'): resp = app.get('/toulouse-axel/test/active_dui?NameID=yyy') assert resp.json['err'] == 0 - assert set(resp.json['data'].keys()) == set([ - 'ADRESSE', - 'CODEMISEAJOUR', - 'DEMATFACTURES', - 'ENFANT', - 'IDDUI', - 'NBENFANTACTIF', - 'NBRLACTIF', - 'REACTUALISATIONENLIGNE', - 'REVENUS', - 'RL1', - 'RL2', - 'SITUATIONFAMILIALE', - 'TELFIXE', - ]) + assert set(resp.json['data'].keys()) == set( + [ + 'ADRESSE', + 'CODEMISEAJOUR', + 'DEMATFACTURES', + 'ENFANT', + 'IDDUI', + 'NBENFANTACTIF', + 'NBRLACTIF', + 'REACTUALISATIONENLIGNE', + 'REVENUS', + 'RL1', + 'RL2', + 'SITUATIONFAMILIALE', + 'TELFIXE', + ] + ) def 
test_referential_endpoint_no_result(app, resource): @@ -779,13 +892,16 @@ def test_referential_endpoint_no_result(app, resource): assert resp.json['err'] == 'not-found' -@pytest.mark.parametrize('code, mapping', [ - ('situation_familiale', situation_familiale_mapping), - ('csp', csp_mapping), - ('lien_parente', lien_parente_mapping), - ('type_regime', type_regime_mapping), - ('regime', regime_mapping), -]) +@pytest.mark.parametrize( + 'code, mapping', + [ + ('situation_familiale', situation_familiale_mapping), + ('csp', csp_mapping), + ('lien_parente', lien_parente_mapping), + ('type_regime', type_regime_mapping), + ('regime', regime_mapping), + ], +) def test_referential_endpoint(app, resource, code, mapping): resp = app.get('/toulouse-axel/test/referential/%s/' % code) expected = [{'id': k, 'text': v} for k, v in mapping.items()] @@ -803,7 +919,9 @@ def test_family_info_endpoint_axel_error(app, resource, family_data): filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/family_info.xml') with open(filepath) as xml: content = xml.read() - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates') as management_dates: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates' + ) as management_dates: management_dates.side_effect = APIError('Axel error: FooBar') with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') @@ -822,31 +940,35 @@ def test_family_info_endpoint(app, resource): filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/family_info.xml') with open(filepath) as xml: content = xml.read() - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates') as management_dates: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates' + ) as management_dates: management_dates.return_value = {'foo': 'bar'} with mock_getdata(content, 
'RefFamilleDui'): resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') assert resp.json['err'] == 0 - assert set(resp.json['data'].keys()) == set([ - 'ADRESSE', - 'CODEMISEAJOUR', - 'DEMATFACTURES', - 'ENFANT', - 'IDDUI', - 'NBENFANTACTIF', - 'NBRLACTIF', - 'REACTUALISATIONENLIGNE', - 'REVENUS', - 'RL1', - 'RL2', - 'SITUATIONFAMILIALE', - 'SITUATIONFAMILIALE_label', - 'TELFIXE', - 'management_dates', - 'annee_reference', - 'annee_reference_short', - 'annee_reference_label', - ]) + assert set(resp.json['data'].keys()) == set( + [ + 'ADRESSE', + 'CODEMISEAJOUR', + 'DEMATFACTURES', + 'ENFANT', + 'IDDUI', + 'NBENFANTACTIF', + 'NBRLACTIF', + 'REACTUALISATIONENLIGNE', + 'REVENUS', + 'RL1', + 'RL2', + 'SITUATIONFAMILIALE', + 'SITUATIONFAMILIALE_label', + 'TELFIXE', + 'management_dates', + 'annee_reference', + 'annee_reference_short', + 'annee_reference_label', + ] + ) assert resp.json['data']['ENFANT'][0]['id'] == '4242' assert resp.json['data']['ENFANT'][0]['text'] == 'foo foo' assert resp.json['data']['ENFANT'][0]['CONTACT'][0]['id'] == 0 @@ -862,9 +984,13 @@ def test_family_info_endpoint(app, resource): assert resp.json['data']['annee_reference_short'] == '19' assert resp.json['data']['annee_reference_label'] == '2019/2020' - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.are_children_registered') as registered: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.are_children_registered' + ) as registered: registered.return_value = {'4242': True} - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates') as management_dates: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates' + ) as management_dates: management_dates.return_value = {'foo': 'bar'} with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') @@ -872,9 +998,13 @@ def test_family_info_endpoint(app, resource): assert 
resp.json['data']['ENFANT'][0]['clae_cantine_current'] is True assert resp.json['data']['ENFANT'][1]['clae_cantine_current'] is None - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.are_children_registered') as registered: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.are_children_registered' + ) as registered: registered.return_value = {} - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates') as management_dates: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates' + ) as management_dates: management_dates.return_value = {'foo': 'bar'} with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') @@ -899,30 +1029,34 @@ def test_family_info_endpoint(app, resource): filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/family_info_light.xml') with open(filepath) as xml: content = xml.read() - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates') as management_dates: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_management_dates' + ) as management_dates: management_dates.return_value = {'foo': 'bar'} with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') assert resp.json['err'] == 0 - assert set(resp.json['data'].keys()) == set([ - 'ADRESSE', - 'CODEMISEAJOUR', - 'DEMATFACTURES', - 'ENFANT', - 'IDDUI', - 'NBENFANTACTIF', - 'NBRLACTIF', - 'REACTUALISATIONENLIGNE', - 'REVENUS', - 'RL1', - 'SITUATIONFAMILIALE', - 'SITUATIONFAMILIALE_label', - 'TELFIXE', - 'management_dates', - 'annee_reference', - 'annee_reference_short', - 'annee_reference_label', - ]) + assert set(resp.json['data'].keys()) == set( + [ + 'ADRESSE', + 'CODEMISEAJOUR', + 'DEMATFACTURES', + 'ENFANT', + 'IDDUI', + 'NBENFANTACTIF', + 'NBRLACTIF', + 'REACTUALISATIONENLIGNE', + 'REVENUS', + 
'RL1', + 'SITUATIONFAMILIALE', + 'SITUATIONFAMILIALE_label', + 'TELFIXE', + 'management_dates', + 'annee_reference', + 'annee_reference_short', + 'annee_reference_label', + ] + ) assert resp.json['data']['ADRESSE'] is None @@ -994,24 +1128,26 @@ def test_child_info_endpoint(app, resource): with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/child_info?NameID=yyy&idpersonne=4242') assert resp.json['err'] == 0 - assert set(resp.json['data'].keys()) == set([ - 'id', - 'text', - 'ASSURANCE', - 'CONTACT', - 'DATENAISSANCE', - 'IDPERSONNE', - 'PRENOM', - 'PRENOMMERE', - 'PRENOMPERE', - 'NOM', - 'NOMMERE', - 'NOMPERE', - 'RATTACHEAUTREDUI', - 'SANITAIRE', - 'SEXE', - 'clae_cantine_current', - ]) + assert set(resp.json['data'].keys()) == set( + [ + 'id', + 'text', + 'ASSURANCE', + 'CONTACT', + 'DATENAISSANCE', + 'IDPERSONNE', + 'PRENOM', + 'PRENOMMERE', + 'PRENOMPERE', + 'NOM', + 'NOMMERE', + 'NOMPERE', + 'RATTACHEAUTREDUI', + 'SANITAIRE', + 'SEXE', + 'clae_cantine_current', + ] + ) assert resp.json['data']['id'] == '4242' assert resp.json['data']['text'] == 'foo foo' assert resp.json['data']['CONTACT'][0]['id'] == 0 @@ -1021,14 +1157,18 @@ def test_child_info_endpoint(app, resource): assert resp.json['data']['CONTACT'][2]['id'] == 2 assert resp.json['data']['CONTACT'][2]['text'] == 'foo3 foo3' - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.are_children_registered') as registered: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.are_children_registered' + ) as registered: registered.return_value = {'4242': True} with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/child_info?NameID=yyy&idpersonne=4242') assert resp.json['err'] == 0 assert resp.json['data']['clae_cantine_current'] is True - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.are_children_registered') as registered: + with mock.patch( + 
'passerelle.contrib.toulouse_axel.models.ToulouseAxel.are_children_registered' + ) as registered: registered.return_value = {} with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/child_info?NameID=yyy&idpersonne=4242') @@ -1106,7 +1246,9 @@ def test_update_family_info_endpoint_axel_error(app, resource, update_params, fa with mock.patch('passerelle.contrib.toulouse_axel.schemas.form_maj_famille_dui') as operation: operation.side_effect = AxelError('FooBar') - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=update_params) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -1124,7 +1266,9 @@ def test_update_family_info_endpoint(app, resource, update_params, family_data): content = "" with mock_getdata(content, 'FormMajFamilleDui'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=update_params) assert resp.json['err'] == 0 assert resp.json['dui'] == 'XXX' @@ -1135,20 +1279,28 @@ def test_update_family_info_endpoint(app, resource, update_params, family_data): with mock.patch('passerelle.contrib.toulouse_axel.schemas.form_maj_famille_dui') as operation: operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', 
return_value=family_data + ): resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=update_params) assert operation.call_args_list[0][0][1]['PORTAIL']['DUI']['IDDUI'] == 'XXX' - assert operation.call_args_list[0][0][1]['PORTAIL']['DUI']['DATEDEMANDE'] == datetime.date.today().strftime('%Y-%m-%d') + assert operation.call_args_list[0][0][1]['PORTAIL']['DUI'][ + 'DATEDEMANDE' + ] == datetime.date.today().strftime('%Y-%m-%d') assert operation.call_args_list[0][0][1]['PORTAIL']['DUI']['QUIACTUALISEDUI'] == '1' link.person_id = '35' link.save() with mock.patch('passerelle.contrib.toulouse_axel.schemas.form_maj_famille_dui') as operation: operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=update_params) assert operation.call_args_list[0][0][1]['PORTAIL']['DUI']['IDDUI'] == 'XXX' - assert operation.call_args_list[0][0][1]['PORTAIL']['DUI']['DATEDEMANDE'] == datetime.date.today().strftime('%Y-%m-%d') + assert operation.call_args_list[0][0][1]['PORTAIL']['DUI'][ + 'DATEDEMANDE' + ] == datetime.date.today().strftime('%Y-%m-%d') assert operation.call_args_list[0][0][1]['PORTAIL']['DUI']['QUIACTUALISEDUI'] == '2' @@ -1156,8 +1308,12 @@ def test_update_family_info_flat_endpoint(app, resource, flat_update_params, fam Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') content = "" with mock_getdata(content, 'FormMajFamilleDui'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): - resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=flat_update_params) + with 
mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): + resp = app.post_json( + '/toulouse-axel/test/update_family_info?NameID=yyy', params=flat_update_params + ) assert resp.json['err'] == 0 assert resp.json['dui'] == 'XXX' assert resp.json['updated'] is True @@ -1177,7 +1333,9 @@ def test_sanitize_update_family_data_missing_rl_fields(app, resource, update_par resource.pre_sanitize_update_family_data(post_data=full_update_params) jsonschema.validate(full_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=full_update_params) for key in ['IDPERSONNE', 'NOM', 'PRENOM', 'NOMJEUNEFILLE', 'DATENAISSANCE', 'CIVILITE']: assert full_update_params['RL1'][key] == family_data['RL1'][key] @@ -1195,7 +1353,9 @@ def test_sanitize_update_family_data_missing_revenus_fields(app, resource, updat resource.pre_sanitize_update_family_data(post_data=full_update_params) jsonschema.validate(full_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=full_update_params) assert full_update_params['REVENUS']['NBENFANTSACHARGE'] == family_data['REVENUS']['NBENFANTSACHARGE'] @@ -1204,7 +1364,9 @@ def test_sanitize_update_family_data_missing_revenus_fields(app, resource, updat family_data.pop('REVENUS') resource.pre_sanitize_update_family_data(post_data=full_update_params) jsonschema.validate(full_update_params, json_schema) - with 
mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=full_update_params) assert full_update_params['REVENUS']['NBENFANTSACHARGE'] is None @@ -1536,7 +1698,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie(app, resource, resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) assert '_to_reset' in partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE'] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert len(partial_update_params['ENFANT']) == 1 assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "4242" @@ -1552,7 +1716,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie(app, resource, partial_update_params['maj:enfant_0'] = False resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert len(partial_update_params['ENFANT']) == 1 assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535" @@ -1564,7 +1730,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie(app, resource, partial_update_params['maj:enfant_0'] = False 
resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert 'ENFANT' not in partial_update_params @@ -1575,7 +1743,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie(app, resource, partial_update_params['maj:enfant_1'] = False resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) new_values = partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE'] old_values = family_data['ENFANT'][0]['SANITAIRE']['ALLERGIE'] @@ -1590,7 +1760,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie(app, resource, jsonschema.validate(partial_update_params, json_schema) -def test_sanitize_update_family_data_enfant_n_sanitaire_allergie_values(app, resource, update_params, family_data): +def test_sanitize_update_family_data_enfant_n_sanitaire_allergie_values( + app, resource, update_params, family_data +): json_schema = schemas.UPDATE_FAMILY_SCHEMA # check values partial_update_params = copy.deepcopy(update_params) @@ -1601,7 +1773,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie_values(app, res partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE']['AUTRES'] = '' resource.pre_sanitize_update_family_data(post_data=partial_update_params) 
jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert 'ALLERGIE' not in partial_update_params['ENFANT'][0]['SANITAIRE'] @@ -1613,7 +1787,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie_values(app, res partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE']['AUTRES'] = '' resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert 'ALLERGIE' not in partial_update_params['ENFANT'][0]['SANITAIRE'] @@ -1625,7 +1801,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie_values(app, res partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE']['AUTRES'] = 'accariens' resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE'] == [ {'TYPE': 'ASTHME', 'ALLERGIQUE': 'OUI', 'NOMALLERGIE': None}, @@ -1642,7 +1820,9 @@ def 
test_sanitize_update_family_data_enfant_n_sanitaire_allergie_values(app, res partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE']['AUTRES'] = '' resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE'] == [ {'TYPE': 'MEDICAMENTEUSES', 'ALLERGIQUE': 'OUI', 'NOMALLERGIE': None}, @@ -1766,7 +1946,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_handicap(app, resource, resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) assert '_to_reset' in partial_update_params['ENFANT'][0]['SANITAIRE']['HANDICAP'] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert len(partial_update_params['ENFANT']) == 1 assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "4242" @@ -1783,7 +1965,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_handicap(app, resource, partial_update_params['maj:enfant_0'] = False resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', 
return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert len(partial_update_params['ENFANT']) == 1 assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535" @@ -1795,7 +1979,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_handicap(app, resource, partial_update_params['maj:enfant_0'] = False resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert 'ENFANT' not in partial_update_params @@ -1809,7 +1995,9 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_handicap(app, resource, partial_update_params['ENFANT'][0]['SANITAIRE']['HANDICAP'][key] = None resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data + ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) new_values = partial_update_params['ENFANT'][0]['SANITAIRE'] for key in handicap_fields: @@ -1825,16 +2013,19 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_handicap(app, resource, jsonschema.validate(partial_update_params, json_schema) -@pytest.mark.parametrize('flags', [ - # get family data to fill handicap fields - ['maj:enfant_0_sanitaire_handicap', 'maj:revenus', 'maj:rl1', 'maj:rl2'], - # get family data to fill revenus fields - ['maj:rl1', 'maj:rl2'], - # get family data to fill 
rl1 fields - ['maj:revenus', 'maj:rl2'], - # get family data to fill rl2 fields - ['maj:revenus', 'maj:rl1'], -]) +@pytest.mark.parametrize( + 'flags', + [ + # get family data to fill handicap fields + ['maj:enfant_0_sanitaire_handicap', 'maj:revenus', 'maj:rl1', 'maj:rl2'], + # get family data to fill revenus fields + ['maj:rl1', 'maj:rl2'], + # get family data to fill rl1 fields + ['maj:revenus', 'maj:rl2'], + # get family data to fill rl2 fields + ['maj:revenus', 'maj:rl1'], + ], +) def test_sanitize_update_family_data_axel_error(app, resource, update_params, flags): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') json_schema = schemas.UPDATE_FAMILY_SCHEMA @@ -1852,7 +2043,9 @@ def test_sanitize_update_family_data_axel_error(app, resource, update_params, fl def test_update_family_info_endpoint_sanitize_axel_error(app, resource, update_params): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.sanitize_update_family_data') as sanitize: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.sanitize_update_family_data' + ) as sanitize: sanitize.side_effect = APIError('Axel error: FooBar') resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=update_params) assert resp.json['err_desc'] == "Axel error: FooBar" @@ -1921,8 +2114,8 @@ def test_invoices_endpoint(app, resource): 'MONTANTTOTAL': '44.94', 'NUMFACTURE': 42, 'RESTEAPAYER': '4.94', - } - } + } + }, }, { 'id': 'XXX-43', @@ -1949,8 +2142,8 @@ def test_invoices_endpoint(app, resource): 'NUMFACTURE': 43, 'RESTEAPAYER': '44.94', } - } - } + }, + }, ] with mock_getdata(content, 'RefFactureAPayer'): resp = app.get('/toulouse-axel/test/regie/AUTREREGIE/invoices?NameID=yyy') @@ -1980,8 +2173,8 @@ def test_invoices_endpoint(app, resource): 'MONTANTTOTAL': '44.94', 'NUMFACTURE': 44, 'RESTEAPAYER': '44.94', - } - } + } + }, } ] @@ -2046,9 
+2239,9 @@ def test_invoices_history_endpoint(app, resource): 'LIBELLE': 'PRESTATIONS SEPTEMBRE 2015', 'MONTANT': '28.98', 'NOFACTURE': 42, - 'NUMDIRECTION': 10 + 'NUMDIRECTION': 10, } - } + }, }, { 'amount': 0, @@ -2073,10 +2266,10 @@ def test_invoices_history_endpoint(app, resource): 'LIBELLE': 'PRESTATIONS OCTOBRE 2015', 'MONTANT': '28.98', 'NOFACTURE': 43, - 'NUMDIRECTION': 11 + 'NUMDIRECTION': 11, } - } - } + }, + }, ] @@ -2141,7 +2334,7 @@ def test_invoice_endpoint(app, resource): 'NUMFACTURE': 42, 'RESTEAPAYER': '4.94', } - } + }, } filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/invoices_history.xml') @@ -2173,9 +2366,9 @@ def test_invoice_endpoint(app, resource): 'LIBELLE': 'PRESTATIONS SEPTEMBRE 2015', 'MONTANT': '28.98', 'NOFACTURE': 42, - 'NUMDIRECTION': 10 + 'NUMDIRECTION': 10, } - } + }, } @@ -2259,7 +2452,9 @@ def test_pay_invoice_endpoint_axel_error(app, resource): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_facture_a_payer') as operation: operation.side_effect = AxelError('FooBar') - resp = app.post_json('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload) + resp = app.post_json( + '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -2269,7 +2464,9 @@ def test_pay_invoice_endpoint_axel_error(app, resource): with mock_getdata(content, 'RefFactureAPayer'): with mock.patch('passerelle.contrib.toulouse_axel.schemas.form_paiement_dui') as operation: operation.side_effect = AxelError('FooBar') - resp = app.post_json('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload) + resp = app.post_json( + '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] 
== 'error' @@ -2283,11 +2480,15 @@ def test_pay_invoice_endpoint_no_result(app, resource): with open(filepath) as xml: content = xml.read() with mock_getdata(content, 'RefFactureAPayer'): - resp = app.post_json('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-35/pay?NameID=yyy', params=payload) + resp = app.post_json( + '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-35/pay?NameID=yyy', params=payload + ) assert resp.json['err_desc'] == "Invoice not found" assert resp.json['err'] == 'not-found' with mock_getdata(content, 'RefFactureAPayer'): - resp = app.post_json('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-44/pay?NameID=yyy', params=payload) + resp = app.post_json( + '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-44/pay?NameID=yyy', params=payload + ) assert resp.json['err_desc'] == "Invoice not found" assert resp.json['err'] == 'not-found' @@ -2303,7 +2504,9 @@ def test_pay_invoice_endpoint(app, resource): content = xml.read() with mock_getdata(content, 'RefFactureAPayer'): with mock.patch('passerelle.contrib.toulouse_axel.schemas.form_paiement_dui') as operation: - resp = app.post_json('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload) + resp = app.post_json( + '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload + ) assert resp.json['err'] == 0 assert resp.json['data'] is True assert operation.call_args_list[0][0][1] == { @@ -2343,16 +2546,19 @@ def test_clae_years_endpoint_no_result(app, resource): assert resp.json['err'] == 'not-found' -@pytest.mark.parametrize('today,pivot,next_year,next_date', [ - ('2019-08-01', '08-01', True, '2020-08-01'), - ('2019-08-01', '08-02', False, None), - ('2020-07-31', '07-31', True, '2021-07-31'), - ('2020-07-30', '07-31', False, None), - ('2020-06-14', '06-15', False, None), - ('2020-06-15', '06-15', True, '2021-06-15'), - ('2020-06-16', '06-15', True, '2021-06-16'), - ('2020-02-29', '02-01', True, '2021-03-01'), -]) +@pytest.mark.parametrize( + 
'today,pivot,next_year,next_date', + [ + ('2019-08-01', '08-01', True, '2020-08-01'), + ('2019-08-01', '08-02', False, None), + ('2020-07-31', '07-31', True, '2021-07-31'), + ('2020-07-30', '07-31', False, None), + ('2020-06-14', '06-15', False, None), + ('2020-06-15', '06-15', True, '2021-06-15'), + ('2020-06-16', '06-15', True, '2021-06-16'), + ('2020-02-29', '02-01', True, '2021-03-01'), + ], +) def test_clae_years_endpoint(app, resource, today, pivot, next_year, next_date): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/child_activities.xml') @@ -2364,7 +2570,12 @@ def test_clae_years_endpoint(app, resource, today, pivot, next_year, next_date): assert resp.json['data'][0] == {'id': '2019', 'text': '2019/2020', 'type': 'encours', 'refdate': today} if next_year: assert len(resp.json['data']) == 2 - assert resp.json['data'][1] == {'id': '2020', 'text': '2020/2021', 'type': 'suivante', 'refdate': next_date} + assert resp.json['data'][1] == { + 'id': '2020', + 'text': '2020/2021', + 'type': 'suivante', + 'refdate': next_date, + } else: assert len(resp.json['data']) == 1 @@ -2372,7 +2583,9 @@ def test_clae_years_endpoint(app, resource, today, pivot, next_year, next_date): @freezegun.freeze_time('2020-06-30') def test_clae_years_endpoint_noactivities(app, resource): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_children_activities', return_value={}) as mock_activities: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_children_activities', return_value={} + ) as mock_activities: resp = app.get('/toulouse-axel/test/clae_years?NameID=yyy&pivot_date=06-15') assert mock_activities.call_args_list == [mock.call(dui='XXX', reference_year=2020)] assert len(resp.json['data']) == 1 @@ -2390,7 +2603,9 @@ def 
test_clae_children_activities_info_endpoint_axel_error(app, resource): @pytest.mark.parametrize('value', ['foo', '20/01/2020', '2020']) def test_clae_children_activities_info_endpoint_bad_date_format(app, resource, value): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - resp = app.get('/toulouse-axel/test/clae_children_activities_info?NameID=yyy&booking_date=%s' % value, status=400) + resp = app.get( + '/toulouse-axel/test/clae_children_activities_info?NameID=yyy&booking_date=%s' % value, status=400 + ) assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" assert resp.json['err'] == 'bad-request' @@ -2422,7 +2637,7 @@ def test_clae_children_activities_info_endpoint(app, resource): 'ISPAI': False, 'LIBELLEACTIVITE': 'Temps du matin', 'TARIF': '0.42', - 'TYPEACTIVITE': 'MAT' + 'TYPEACTIVITE': 'MAT', }, { 'COUTREVIENT': '99999', @@ -2434,7 +2649,7 @@ def test_clae_children_activities_info_endpoint(app, resource): 'ISPAI': False, 'LIBELLEACTIVITE': 'Temps du midi', 'TARIF': '0.43', - 'TYPEACTIVITE': 'MIDI' + 'TYPEACTIVITE': 'MIDI', }, { 'COUTREVIENT': '99999', @@ -2446,7 +2661,7 @@ def test_clae_children_activities_info_endpoint(app, resource): 'ISPAI': False, 'LIBELLEACTIVITE': 'Temps du soir', 'TARIF': '0.44', - 'TYPEACTIVITE': 'SOIR' + 'TYPEACTIVITE': 'SOIR', }, { 'COUTREVIENT': '99999', @@ -2458,8 +2673,8 @@ def test_clae_children_activities_info_endpoint(app, resource): 'ISPAI': False, 'LIBELLEACTIVITE': 'Temps mercredi apr\xe8s midi', 'TARIF': '0.45', - 'TYPEACTIVITE': 'GARD' - } + 'TYPEACTIVITE': 'GARD', + }, ], 'DATENAISSANCE': '2019-01-01', 'IDPERSONNE': '3535', @@ -2469,7 +2684,7 @@ def test_clae_children_activities_info_endpoint(app, resource): 'REGIME': 'SV', 'REGIME_label': 'Menu sans viande', 'id': '3535', - 'text': 'foo foo' + 'text': 'foo foo', } ] @@ -2478,31 +2693,51 @@ def test_clae_children_activities_info_endpoint(app, resource): assert resp.json['err'] == 0 -@pytest.mark.parametrize('activities, 
expected', [ - # all activities except GARD - optional - (['MAT', 'MIDI', 'SOIR'], True), - # all activities - (['MAT', 'MIDI', 'SOIR', 'GARD'], True), - # duplicated activities - (['MAT', 'MAT', 'MIDI', 'SOIR', 'GARD'], False), - (['MAT', 'MIDI', 'MIDI', 'SOIR', 'GARD'], False), - (['MAT', 'MIDI', 'SOIR', 'SOIR', 'GARD'], False), - (['MAT', 'MIDI', 'SOIR', 'GARD', 'GARD'], False), - # missing activity - (['MIDI', 'SOIR'], False), - (['MAT', 'SOIR'], False), - (['MAT', 'MIDI'], False), -]) +@pytest.mark.parametrize( + 'activities, expected', + [ + # all activities except GARD - optional + (['MAT', 'MIDI', 'SOIR'], True), + # all activities + (['MAT', 'MIDI', 'SOIR', 'GARD'], True), + # duplicated activities + (['MAT', 'MAT', 'MIDI', 'SOIR', 'GARD'], False), + (['MAT', 'MIDI', 'MIDI', 'SOIR', 'GARD'], False), + (['MAT', 'MIDI', 'SOIR', 'SOIR', 'GARD'], False), + (['MAT', 'MIDI', 'SOIR', 'GARD', 'GARD'], False), + # missing activity + (['MIDI', 'SOIR'], False), + (['MAT', 'SOIR'], False), + (['MAT', 'MIDI'], False), + ], +) def test_clae_children_activities_info_check(app, resource, activities, expected): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') result = [] for activity in activities: result.append({'IDACTIVITE': 'foo%s' % activity, 'TYPEACTIVITE': activity}) - result = {'DATA': {'PORTAIL': {'DUI': {'ENFANT': [ - {'IDPERSONNE': '42', 'NOM': '', 'PRENOM': '', 'REGIME': '', 'LIBELLEECOLE': '', 'ACTIVITE': result} - ]}}}} + result = { + 'DATA': { + 'PORTAIL': { + 'DUI': { + 'ENFANT': [ + { + 'IDPERSONNE': '42', + 'NOM': '', + 'PRENOM': '', + 'REGIME': '', + 'LIBELLEECOLE': '', + 'ACTIVITE': result, + } + ] + } + } + } + } with mock.patch('passerelle.contrib.toulouse_axel.schemas.enfants_activites') as operation: - operation.return_value = schemas.OperationResult(json_response=result, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response=result, xml_request='', xml_response='' + ) resp 
= app.get('/toulouse-axel/test/clae_children_activities_info?NameID=yyy&booking_date=2020-01-20') assert resp.json['err'] == 0 assert len(resp.json['data']) == expected and 1 or 0 @@ -2512,7 +2747,9 @@ def test_clae_booking_activities_info_endpoint_axel_error(app, resource): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') with mock.patch('passerelle.contrib.toulouse_axel.schemas.enfants_activites') as operation: operation.side_effect = AxelError('FooBar') - resp = app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24') + resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -2522,7 +2759,9 @@ def test_clae_booking_activities_info_endpoint_axel_error(app, resource): with mock_getdata(content, 'EnfantsActivites'): with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_periode') as operation: operation.side_effect = AxelError('FooBar') - resp = app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24') + resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -2530,16 +2769,26 @@ def test_clae_booking_activities_info_endpoint_axel_error(app, resource): @pytest.mark.parametrize('value', ['foo', '20/01/2020', '2020']) def test_clae_booking_activities_info_endpoint_bad_date_format(app, resource, value): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - resp = app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=%s&end_date=2020-01-24' % value, status=400) + 
resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=%s&end_date=2020-01-24' + % value, + status=400, + ) assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" assert resp.json['err'] == 'bad-request' - resp = app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=%s' % value, status=400) + resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=%s' + % value, + status=400, + ) assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" assert resp.json['err'] == 'bad-request' def test_clae_booking_activities_info_endpoint_no_result(app, resource, child_activities_data): - resp = app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24') + resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' + ) assert resp.json['err_desc'] == "Person not found" assert resp.json['err'] == 'not-found' @@ -2550,7 +2799,9 @@ def test_clae_booking_activities_info_endpoint_no_result(app, resource, child_ac with mock_getdata(content, 'EnfantsActivites'): with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_periode') as operation: operation.side_effect = AxelError('FooBar') - resp = app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=4242&start_date=2020-01-20&end_date=2020-01-24') + resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=4242&start_date=2020-01-20&end_date=2020-01-24' + ) assert resp.json['err_desc'] == "Child not found" assert resp.json['err'] == 'not-found' @@ -2569,8 +2820,13 @@ def test_clae_booking_activities_info_endpoint_no_result(app, resource, child_ac """ activities = child_activities_data['ENFANT'][0] with 
mock_getdata(content, 'ReservationPeriode'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): - resp = app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24') + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value=activities, + ): + resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' + ) assert resp.json['err_desc'] == "Child not found" assert resp.json['err'] == 'not-found' @@ -2582,8 +2838,13 @@ def test_clae_booking_activities_info_endpoint(app, resource, child_activities_d content = xml.read() activities = copy.deepcopy(child_activities_data['ENFANT'][0]) with mock_getdata(content, 'ReservationPeriode'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): - resp = app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24') + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value=activities, + ): + resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [ { @@ -2740,8 +3001,13 @@ def test_clae_booking_activities_info_endpoint(app, resource, child_activities_d activities = copy.deepcopy(child_activities_data['ENFANT'][0]) activities['ACTIVITE'] = activities['ACTIVITE'][:1] with mock_getdata(content, 'ReservationPeriode'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): - resp = 
app.get('/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24') + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value=activities, + ): + resp = app.get( + '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [ { @@ -2784,30 +3050,48 @@ def test_clae_booking_activities_info_endpoint(app, resource, child_activities_d @freezegun.freeze_time('2020-03-26') -@pytest.mark.parametrize('start_date, end_date, last_date', [ - # end date is a not a friday - ('2020-04-07', '2020-04-13', '2020-04-13'), - # except if end date is saturday or sunday => ends on previous friday - ('2020-04-07', '2020-04-18', '2020-04-17'), - ('2020-04-07', '2020-04-19', '2020-04-17'), - # start date is after end date, same week => result is empty - ('2020-04-16', '2020-04-15', None), - # start date is after end date, not the same week => result is empty - ('2020-04-16', '2020-04-12', None), -]) +@pytest.mark.parametrize( + 'start_date, end_date, last_date', + [ + # end date is a not a friday + ('2020-04-07', '2020-04-13', '2020-04-13'), + # except if end date is saturday or sunday => ends on previous friday + ('2020-04-07', '2020-04-18', '2020-04-17'), + ('2020-04-07', '2020-04-19', '2020-04-17'), + # start date is after end date, same week => result is empty + ('2020-04-16', '2020-04-15', None), + # start date is after end date, not the same week => result is empty + ('2020-04-16', '2020-04-12', None), + ], +) def test_clae_booking_activities_info_period(app, resource, start_date, end_date, last_date): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data') as activities_info: - activities_info.return_value = {'ACTIVITE': [{ - 'id': 
'TOTO', - 'booking': {'days': {'monday': True, 'tuesday': True, 'wednesday': True, 'thursday': True, 'friday': True}}, - 'TYPEACTIVITE': 'MAT', - 'LIBELLEACTIVITE': 'Matin', - }]} + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data' + ) as activities_info: + activities_info.return_value = { + 'ACTIVITE': [ + { + 'id': 'TOTO', + 'booking': { + 'days': { + 'monday': True, + 'tuesday': True, + 'wednesday': True, + 'thursday': True, + 'friday': True, + } + }, + 'TYPEACTIVITE': 'MAT', + 'LIBELLEACTIVITE': 'Matin', + } + ] + } resp = app.get( '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535' - '&start_date=%s&end_date=%s' % (start_date, end_date)) + '&start_date=%s&end_date=%s' % (start_date, end_date) + ) if last_date is None: assert resp.json['data'] == [] else: @@ -2819,16 +3103,31 @@ def test_clae_booking_activities_info_period(app, resource, start_date, end_date def test_clae_booking_activities_info_bookable(app, resource): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data') as activities_info: - activities_info.return_value = {'ACTIVITE': [{ - 'id': 'TOTO', - 'booking': {'days': {'monday': True, 'tuesday': True, 'wednesday': True, 'thursday': True, 'friday': True}}, - 'TYPEACTIVITE': 'MAT', - 'LIBELLEACTIVITE': 'Matin', - }]} + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data' + ) as activities_info: + activities_info.return_value = { + 'ACTIVITE': [ + { + 'id': 'TOTO', + 'booking': { + 'days': { + 'monday': True, + 'tuesday': True, + 'wednesday': True, + 'thursday': True, + 'friday': True, + } + }, + 'TYPEACTIVITE': 'MAT', + 'LIBELLEACTIVITE': 'Matin', + } + ] + } resp = app.get( '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535' - '&start_date=2020-04-01&end_date=2020-04-30') + 
'&start_date=2020-04-01&end_date=2020-04-30' + ) activities = {a['day']: a['bookable'] for a in resp.json['data']} assert activities['2020-04-01'] is False @@ -2862,7 +3161,8 @@ def test_clae_booking_activity_possible_days_endpoint_axel_error(app, resource): operation.side_effect = AxelError('FooBar') resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -2874,7 +3174,8 @@ def test_clae_booking_activity_possible_days_endpoint_axel_error(app, resource): operation.side_effect = AxelError('FooBar') resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -2885,12 +3186,16 @@ def test_clae_booking_activity_possible_days_endpoint_bad_date_format(app, resou Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=%s&end_date=2020-01-24&activity_type=MAT' % value, status=400) + '&start_date=%s&end_date=2020-01-24&activity_type=MAT' % value, + status=400, + ) assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" assert resp.json['err'] == 'bad-request' resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-02-20&end_date=%s&activity_type=MAT' % value, status=400) + '&start_date=2020-02-20&end_date=%s&activity_type=MAT' % value, + status=400, + ) assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" 
assert resp.json['err'] == 'bad-request' @@ -2900,7 +3205,9 @@ def test_clae_booking_activity_possible_days_endpoint_activity_type(app, resourc Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=FOO', status=400) + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=FOO', + status=400, + ) assert resp.json['err_desc'] == "bad activity_type, should be MAT, MIDI, SOIR or GARD" assert resp.json['err'] == 'bad-request' @@ -2909,7 +3216,8 @@ def test_clae_booking_activity_possible_days_endpoint_activity_type(app, resourc def test_clae_booking_activity_possible_days_endpoint_no_result(app, resource, child_activities_data): resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Person not found" assert resp.json['err'] == 'not-found' @@ -2922,7 +3230,8 @@ def test_clae_booking_activity_possible_days_endpoint_no_result(app, resource, c operation.side_effect = AxelError('FooBar') resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=4242' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Child not found" assert resp.json['err'] == 'not-found' @@ -2941,18 +3250,25 @@ def test_clae_booking_activity_possible_days_endpoint_no_result(app, resource, c """ activities = child_activities_data['ENFANT'][0] with mock_getdata(content, 'ReservationPeriode'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 
'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value=activities, + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Child not found" assert resp.json['err'] == 'not-found' - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data') as activities_info: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data' + ) as activities_info: activities_info.return_value = {'ACTIVITE': []} resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [] @@ -2980,10 +3296,14 @@ def test_clae_booking_activity_possible_days_endpoint(app, resource, child_activ """ activities = child_activities_data['ENFANT'][0] with mock_getdata(content, 'ReservationPeriode'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value=activities, + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MIDI') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MIDI' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 5 assert set(resp.json['data'][0].keys()) == set(['id', 'text', 'disabled', 'prefill', 'details']) @@ -3009,52 +3329,77 @@ def test_clae_booking_activity_possible_days_endpoint(app, resource, child_activ assert 
resp.json['data'][4]['prefill'] is True # again - data are in cache - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 5 -@pytest.mark.parametrize('today, start_date, first_date, end_date, last_date', [ - # today is start date -> + 8 days - ('2020-03-26', '2020-03-26', '2020-04-03', '2020-04-17', '2020-04-17'), - # start date is a friday => starts friday - ('2020-03-26', '2020-04-03', '2020-04-03', '2020-04-17', '2020-04-17'), - # start date is a saturday => starts on next monday - ('2020-03-26', '2020-04-04', '2020-04-06', '2020-04-17', '2020-04-17'), - # start date is a sunday => starts on next monday - ('2020-03-26', '2020-04-05', '2020-04-06', '2020-04-17', '2020-04-17'), - # start date is a monday => starts on monday - ('2020-03-26', '2020-04-06', '2020-04-06', '2020-04-17', '2020-04-17'), - # start date is a tuesday => starts on tuesday - ('2020-03-26', '2020-04-07', '2020-04-07', '2020-04-17', '2020-04-17'), - # end date is a not a friday - ('2020-03-26', '2020-04-07', '2020-04-07', '2020-04-13', '2020-04-13'), - # except if end date is saturday or sunday => ends on previous friday - ('2020-03-26', '2020-04-07', '2020-04-07', '2020-04-18', '2020-04-17'), - ('2020-03-26', '2020-04-07', '2020-04-07', '2020-04-19', '2020-04-17'), - # start date is after end date, same week => result is empty - ('2020-03-26', '2020-04-16', None, '2020-04-15', None), - # start date is after end date, not the same week => result is empty - ('2020-03-26', '2020-04-16', None, '2020-04-12', None), 
-]) -def test_clae_booking_activity_possible_days_period(app, resource, today, start_date, first_date, end_date, last_date, child_activities_data): +@pytest.mark.parametrize( + 'today, start_date, first_date, end_date, last_date', + [ + # today is start date -> + 8 days + ('2020-03-26', '2020-03-26', '2020-04-03', '2020-04-17', '2020-04-17'), + # start date is a friday => starts friday + ('2020-03-26', '2020-04-03', '2020-04-03', '2020-04-17', '2020-04-17'), + # start date is a saturday => starts on next monday + ('2020-03-26', '2020-04-04', '2020-04-06', '2020-04-17', '2020-04-17'), + # start date is a sunday => starts on next monday + ('2020-03-26', '2020-04-05', '2020-04-06', '2020-04-17', '2020-04-17'), + # start date is a monday => starts on monday + ('2020-03-26', '2020-04-06', '2020-04-06', '2020-04-17', '2020-04-17'), + # start date is a tuesday => starts on tuesday + ('2020-03-26', '2020-04-07', '2020-04-07', '2020-04-17', '2020-04-17'), + # end date is a not a friday + ('2020-03-26', '2020-04-07', '2020-04-07', '2020-04-13', '2020-04-13'), + # except if end date is saturday or sunday => ends on previous friday + ('2020-03-26', '2020-04-07', '2020-04-07', '2020-04-18', '2020-04-17'), + ('2020-03-26', '2020-04-07', '2020-04-07', '2020-04-19', '2020-04-17'), + # start date is after end date, same week => result is empty + ('2020-03-26', '2020-04-16', None, '2020-04-15', None), + # start date is after end date, not the same week => result is empty + ('2020-03-26', '2020-04-16', None, '2020-04-12', None), + ], +) +def test_clae_booking_activity_possible_days_period( + app, resource, today, start_date, first_date, end_date, last_date, child_activities_data +): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') activities = child_activities_data['ENFANT'][0] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): - with 
mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data') as activities_info: - activities_info.return_value = {'ACTIVITE': [{ - 'id': 'TOTO', - 'TYPEACTIVITE': 'MAT', - 'booking': {'days': {'monday': True, 'tuesday': True, 'wednesday': True, 'thursday': True, 'friday': True}} - }]} + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data' + ) as activities_info: + activities_info.return_value = { + 'ACTIVITE': [ + { + 'id': 'TOTO', + 'TYPEACTIVITE': 'MAT', + 'booking': { + 'days': { + 'monday': True, + 'tuesday': True, + 'wednesday': True, + 'thursday': True, + 'friday': True, + } + }, + } + ] + } with freezegun.freeze_time(today): resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=%s&end_date=%s&activity_type=MAT' % (start_date, end_date)) + '&start_date=%s&end_date=%s&activity_type=MAT' % (start_date, end_date) + ) if first_date is None: assert resp.json['data'] == [] else: @@ -3065,11 +3410,14 @@ def test_clae_booking_activity_possible_days_period(app, resource, today, start_ def test_clae_booking_activity_possible_days_next_year(app, resource): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') with freezegun.freeze_time('2020-07-01'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities') as mock_activities: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities' + ) as mock_activities: mock_activities.return_value = {} app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-09-01&end_date=2020-09-30&activity_type=MAT') + '&start_date=2020-09-01&end_date=2020-09-30&activity_type=MAT' + ) assert mock_activities.call_args_list == [ 
mock.call(child_id='3535', dui='XXX', reference_year=2020), mock.call(child_id='3535', dui='XXX', reference_year=2020), @@ -3084,26 +3432,48 @@ def test_clae_booking_activity_possible_days_endpoint_dates(app, resource, child activities['ACTIVITE'][1]['DATEENTREE'] = '2019-10-01' # last activity ends before 2020-07-31 activities['ACTIVITE'][3]['DATESORTIE'] = '2020-06-30' - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data') as activities_info: - activities_info.return_value = {'ACTIVITE': [{ - 'id': 'TOTO', - 'TYPEACTIVITE': 'MAT', - 'booking': {'days': {'monday': True, 'tuesday': True, 'wednesday': True, 'thursday': True, 'friday': True}} - }]} + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_booking_data' + ) as activities_info: + activities_info.return_value = { + 'ACTIVITE': [ + { + 'id': 'TOTO', + 'TYPEACTIVITE': 'MAT', + 'booking': { + 'days': { + 'monday': True, + 'tuesday': True, + 'wednesday': True, + 'thursday': True, + 'friday': True, + } + }, + } + ] + } # it is not possible to book before 2019-10-01 - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value=activities, + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2019-09-01&end_date=2019-10-11&activity_type=MAT') + '&start_date=2019-09-01&end_date=2019-10-11&activity_type=MAT' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 9 assert resp.json['data'][0]['id'] == '3535:MAT:TOTO:2019-10-01' assert resp.json['data'][-1]['id'] == '3535:MAT:TOTO:2019-10-11' # it is not possible to book after 2020-06-30 - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 
'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value=activities, + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' - '&start_date=2020-06-20&end_date=2020-07-15&activity_type=MAT') + '&start_date=2020-06-20&end_date=2020-07-15&activity_type=MAT' + ) assert resp.json['err'] == 0 assert len(resp.json['data']) == 7 assert resp.json['data'][0]['id'] == '3535:MAT:TOTO:2020-06-22' @@ -3117,7 +3487,8 @@ def test_clae_booking_activity_annual_possible_days_endpoint_axel_error(app, res operation.side_effect = AxelError('FooBar') resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=MAT&booking_date=2019-09-01') + '&activity_type=MAT&booking_date=2019-09-01' + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -3128,7 +3499,9 @@ def test_clae_booking_activity_annual_possible_days_endpoint_bad_date_format(app Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=MAT&booking_date=%s' % value, status=400) + '&activity_type=MAT&booking_date=%s' % value, + status=400, + ) assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" assert resp.json['err'] == 'bad-request' @@ -3138,7 +3511,9 @@ def test_clae_booking_activity_annual_possible_days_endpoint_activity_type(app, Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=FOO&booking_date=2019-09-01', status=400) + '&activity_type=FOO&booking_date=2019-09-01', + status=400, + ) assert resp.json['err_desc'] == "bad activity_type, should be MAT, MIDI, SOIR or GARD" assert resp.json['err'] == 'bad-request' @@ 
-3147,7 +3522,8 @@ def test_clae_booking_activity_annual_possible_days_endpoint_activity_type(app, def test_clae_booking_activity_annual_possible_days_endpoint_no_result(app, resource): resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=MAT&booking_date=2019-09-01') + '&activity_type=MAT&booking_date=2019-09-01' + ) assert resp.json['err_desc'] == "Person not found" assert resp.json['err'] == 'not-found' @@ -3158,14 +3534,19 @@ def test_clae_booking_activity_annual_possible_days_endpoint_no_result(app, reso with mock_getdata(content, 'EnfantsActivites'): resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=4242' - '&activity_type=MAT&booking_date=2019-09-01') + '&activity_type=MAT&booking_date=2019-09-01' + ) assert resp.json['err_desc'] == "Child not found" assert resp.json['err'] == 'not-found' - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value={'ACTIVITE': []}): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value={'ACTIVITE': []}, + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=MAT&booking_date=2019-09-01') + '&activity_type=MAT&booking_date=2019-09-01' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [] @@ -3174,10 +3555,13 @@ def test_clae_booking_activity_annual_possible_days_endpoint_no_result(app, reso def test_clae_booking_activity_annual_possible_days_endpoint(app, resource, child_activities_data): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') activities = child_activities_data['ENFANT'][0] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 
'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=MAT&booking_date=2019-09-01') + '&activity_type=MAT&booking_date=2019-09-01' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [ { @@ -3206,10 +3590,13 @@ def test_clae_booking_activity_annual_possible_days_endpoint(app, resource, chil 'disabled': False, }, ] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=MIDI&booking_date=2019-09-01') + '&activity_type=MIDI&booking_date=2019-09-01' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [ { @@ -3238,10 +3625,13 @@ def test_clae_booking_activity_annual_possible_days_endpoint(app, resource, chil 'disabled': False, }, ] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=SOIR&booking_date=2019-09-01') + '&activity_type=SOIR&booking_date=2019-09-01' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [ { @@ -3270,10 +3660,13 @@ def test_clae_booking_activity_annual_possible_days_endpoint(app, resource, chil 'disabled': False, }, ] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 
'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=GARD&booking_date=2019-09-01') + '&activity_type=GARD&booking_date=2019-09-01' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [ { @@ -3307,11 +3700,14 @@ def test_clae_booking_activity_annual_possible_days_endpoint(app, resource, chil def test_clae_booking_activity_annual_possible_days_next_year(app, resource): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') with freezegun.freeze_time('2020-07-01'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities') as mock_activities: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities' + ) as mock_activities: mock_activities.return_value = {} app.get( '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' - '&activity_type=GARD&booking_date=2020-09-01') + '&activity_type=GARD&booking_date=2020-09-01' + ) assert mock_activities.call_args_list == [ mock.call(child_id='3535', dui='XXX', reference_year=2020), ] @@ -3324,7 +3720,8 @@ def test_clae_booking_activity_prefill_endpoint_axel_error(app, resource): operation.side_effect = AxelError('FooBar') resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -3336,7 +3733,8 @@ def test_clae_booking_activity_prefill_endpoint_axel_error(app, resource): operation.side_effect = AxelError('FooBar') resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - 
'&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -3347,12 +3745,16 @@ def test_clae_booking_activity_prefill_endpoint_bad_date_format(app, resource, v Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - '&start_date=%s&end_date=2020-01-24&activity_type=MAT' % value, status=400) + '&start_date=%s&end_date=2020-01-24&activity_type=MAT' % value, + status=400, + ) assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" assert resp.json['err'] == 'bad-request' resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=%s&activity_type=MAT' % value, status=400) + '&start_date=2020-01-20&end_date=%s&activity_type=MAT' % value, + status=400, + ) assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" assert resp.json['err'] == 'bad-request' @@ -3362,7 +3764,9 @@ def test_clae_booking_activity_prefill_endpoint_activity_type(app, resource): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=FOO', status=400) + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=FOO', + status=400, + ) assert resp.json['err_desc'] == "bad activity_type, should be MAT, MIDI, SOIR or GARD" assert resp.json['err'] == 'bad-request' @@ -3371,7 +3775,8 @@ def test_clae_booking_activity_prefill_endpoint_activity_type(app, resource): def test_clae_booking_activity_prefill_endpoint_no_result(app, resource, child_activities_data): resp = app.get( 
'/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Person not found" assert resp.json['err'] == 'not-found' @@ -3384,7 +3789,8 @@ def test_clae_booking_activity_prefill_endpoint_no_result(app, resource, child_a operation.side_effect = AxelError('FooBar') resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=4242' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Child not found" assert resp.json['err'] == 'not-found' @@ -3403,25 +3809,34 @@ def test_clae_booking_activity_prefill_endpoint_no_result(app, resource, child_a """ activities = child_activities_data['ENFANT'][0] with mock_getdata(content, 'ReservationPeriode'): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', + return_value=activities, + ): resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err_desc'] == "Child not found" assert resp.json['err'] == 'not-found' - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.clae_booking_activity_possible_days') as possible_days: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.clae_booking_activity_possible_days' + ) as possible_days: possible_days.return_value = {'data': []} resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - 
'&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err'] == 0 assert resp.json['data'] == [] def test_clae_booking_activity_prefill_endpoint(app, resource): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.clae_booking_activity_possible_days') as possible_days: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.clae_booking_activity_possible_days' + ) as possible_days: possible_days.return_value = { 'data': [ { @@ -3448,7 +3863,8 @@ def test_clae_booking_activity_prefill_endpoint(app, resource): } resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' - '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT') + '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' + ) assert resp.json['err'] == 0 assert resp.json['data'] == ['3535:MAT:A19P1M1:2020-01-20', '3535:MAT:A19P1M1:2020-01-24'] @@ -3459,25 +3875,35 @@ def test_are_children_registered_axel_error(resource): assert resource.are_children_registered(dui='XXX') == {} -@pytest.mark.parametrize('date_value, expected', [ - ('2019-08-01', {'3535': True}), - ('2020-07-31', {'3535': True}), -]) +@pytest.mark.parametrize( + 'date_value, expected', + [ + ('2019-08-01', {'3535': True}), + ('2020-07-31', {'3535': True}), + ], +) def test_are_children_registered_reference_year(resource, date_value, expected): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_children_activities') as get_children_activities: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_children_activities' + ) as get_children_activities: get_children_activities.return_value = {'3535': {'ACTIVITE': [{}]}} with freezegun.freeze_time(date_value): assert resource.are_children_registered(dui='XXX') == expected 
-@pytest.mark.parametrize('activities, expected', [ - ({}, {}), - ({'3535': {'ACTIVITE': []}}, {'3535': False}), - ({'3535': {'ACTIVITE': [{}]}}, {'3535': True}), - ({'3535': {'ACTIVITE': [{}]}, '4242': {}}, {'3535': True, '4242': False}), -]) +@pytest.mark.parametrize( + 'activities, expected', + [ + ({}, {}), + ({'3535': {'ACTIVITE': []}}, {'3535': False}), + ({'3535': {'ACTIVITE': [{}]}}, {'3535': True}), + ({'3535': {'ACTIVITE': [{}]}, '4242': {}}, {'3535': True, '4242': False}), + ], +) def test_are_children_registered(resource, activities, expected): - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_children_activities') as get_children_activities: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_children_activities' + ) as get_children_activities: get_children_activities.return_value = activities assert resource.are_children_registered(dui='XXX') == expected @@ -3556,7 +3982,9 @@ def test_clae_booking_endpoint_no_result(app, resource, booking_params): def test_clae_booking_endpoint(app, resource, booking_params, child_activities_data): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') activities = child_activities_data['ENFANT'][0] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): content = "" with mock_getdata(content, 'ReservationAnnuelle'): with mock.patch('django.core.cache.cache.delete') as mock_cache_delete: @@ -3572,56 +4000,65 @@ def test_clae_booking_endpoint(app, resource, booking_params, child_activities_d assert 'xml_response' in resp.json['data'] with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = 
schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert payload == { 'DATEDEMANDE': '2019-09-01', - 'ENFANT': [{ - 'IDPERSONNE': '3535', - 'REGIME': 'AV', - 'ACTIVITE': [{ - 'IDACTIVITE': 'A19P1M1', - 'ANNEEREFERENCE': '2019', - 'PERIODE': [ + 'ENFANT': [ + { + 'IDPERSONNE': '3535', + 'REGIME': 'AV', + 'ACTIVITE': [ { - 'DATEDEBUT': '2020-04-13', - 'DATEDFIN': '2020-04-17', - 'SEMAINETYPE': '11011', + 'IDACTIVITE': 'A19P1M1', + 'ANNEEREFERENCE': '2019', + 'PERIODE': [ + { + 'DATEDEBUT': '2020-04-13', + 'DATEDFIN': '2020-04-17', + 'SEMAINETYPE': '11011', + }, + ], }, - ] - }, { - 'IDACTIVITE': 'A19P1M2', - 'ANNEEREFERENCE': '2019', - 'PERIODE': [ { - 'DATEDEBUT': '2020-04-13', - 'DATEDFIN': '2020-04-17', - 'SEMAINETYPE': '11011', + 'IDACTIVITE': 'A19P1M2', + 'ANNEEREFERENCE': '2019', + 'PERIODE': [ + { + 'DATEDEBUT': '2020-04-13', + 'DATEDFIN': '2020-04-17', + 'SEMAINETYPE': '11011', + }, + ], }, - ] - }, { - 'IDACTIVITE': 'A19P1M3', - 'ANNEEREFERENCE': '2019', - 'PERIODE': [ { - 'DATEDEBUT': '2020-04-13', - 'DATEDFIN': '2020-04-17', - 'SEMAINETYPE': '10000', + 'IDACTIVITE': 'A19P1M3', + 'ANNEEREFERENCE': '2019', + 'PERIODE': [ + { + 'DATEDEBUT': '2020-04-13', + 'DATEDFIN': '2020-04-17', + 'SEMAINETYPE': '10000', + }, + ], }, - ] - }, { - 'IDACTIVITE': 'A19P1M4', - 'ANNEEREFERENCE': '2019', - 'PERIODE': [ { - 'DATEDEBUT': '2020-04-13', - 'DATEDFIN': '2020-04-17', - 'SEMAINETYPE': '00100', + 'IDACTIVITE': 'A19P1M4', + 'ANNEEREFERENCE': '2019', + 'PERIODE': [ + { + 'DATEDEBUT': '2020-04-13', + 'DATEDFIN': '2020-04-17', + 'SEMAINETYPE': '00100', + }, + ], }, - ] - }] - }], + ], + } + ], 'IDDUI': 'XXX', } @@ -3632,7 +4069,9 @@ def test_clae_booking_endpoint(app, resource, booking_params, child_activities_d new_booking_params['booking_list_SOIR'] = [] 
new_booking_params['booking_list_GARD'] = [] with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=new_booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert len(payload['ENFANT']) == 1 @@ -3670,7 +4109,9 @@ def test_clae_booking_endpoint(app, resource, booking_params, child_activities_d new_booking_params['booking_list_SOIR'] = None new_booking_params['booking_list_GARD'] = None with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=new_booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert 'ACTIVITE' not in payload['ENFANT'][0] @@ -3680,16 +4121,22 @@ def test_clae_booking_endpoint(app, resource, booking_params, child_activities_d new_booking_params['booking_start_date'] = '2020-04-01' new_booking_params['booking_end_date'] = '2020-04-30' new_booking_params['booking_list_MAT'].append('3535:MAT:A19P1M1:2020-04-01') - new_booking_params['booking_list_MAT'].append('3535:MAT:A19P1M1:2020-03-31') # before the period, ignored + new_booking_params['booking_list_MAT'].append( + '3535:MAT:A19P1M1:2020-03-31' + ) # before the period, ignored new_booking_params['booking_list_MAT'].append('3535:MAT:A19P1M1:2020-04-30') - new_booking_params['booking_list_MAT'].append('3535:MAT:A19P1M1:2020-05-01') # after the period, ignored + new_booking_params['booking_list_MAT'].append( + 
'3535:MAT:A19P1M1:2020-05-01' + ) # after the period, ignored new_booking_params['booking_list_MAT'].append('4242:MAT:A19P1M1:2020-04-15') # wrong child, ignored new_booking_params['booking_list_MAT'].append('3535:MAT:FOO:2020-04-15') # wrong activity id, ignored new_booking_params['booking_list_MIDI'] = None new_booking_params['booking_list_SOIR'] = None new_booking_params['booking_list_GARD'] = None with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) with mock.patch('django.core.cache.cache.delete') as mock_cache_delete: resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=new_booking_params) assert mock_cache_delete.call_args_list == [ @@ -3728,22 +4175,30 @@ def test_clae_booking_endpoint(app, resource, booking_params, child_activities_d def test_clae_booking_regime(app, resource, booking_params, child_activities_data): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') activities = child_activities_data['ENFANT'][0] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): booking_params['regime'] = None with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert 
payload['ENFANT'][0]['REGIME'] == 'SV' booking_params['regime'] = '' with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert payload['ENFANT'][0]['REGIME'] == 'SV' del booking_params['regime'] with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert payload['ENFANT'][0]['REGIME'] == 'SV' @@ -3768,10 +4223,14 @@ def test_clae_booking_endpoint_next_year(app, resource, child_activities_data): 'child_id': '3535', 'regime': 'AV', } - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities') as mock_activities: + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities' + ) as mock_activities: mock_activities.return_value = activities with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) assert mock_activities.call_args_list == [ mock.call(child_id='3535', dui='XXX', 
reference_year=2020), @@ -3791,7 +4250,9 @@ def test_clae_booking_annual_endpoint_axel_error(app, resource, annual_booking_p Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') with mock.patch('passerelle.contrib.toulouse_axel.schemas.enfants_activites') as operation: operation.side_effect = AxelError('FooBar') - resp = app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) + resp = app.post_json( + '/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -3801,7 +4262,9 @@ def test_clae_booking_annual_endpoint_axel_error(app, resource, annual_booking_p with mock_getdata(content, 'EnfantsActivites'): with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: operation.side_effect = AxelError('FooBar') - resp = app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) + resp = app.post_json( + '/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params + ) assert resp.json['err_desc'] == "Axel error: FooBar" assert resp.json['err'] == 'error' @@ -3816,15 +4279,25 @@ def test_clae_booking_annual_endpoint_no_result(app, resource, annual_booking_pa def test_clae_booking_annual_endpoint(app, resource, annual_booking_params, child_activities_data): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') activities = child_activities_data['ENFANT'][0] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): content = "" with mock_getdata(content, 'ReservationAnnuelle'): with mock.patch('django.core.cache.cache.delete') as mock_cache_delete: - resp = 
app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) + resp = app.post_json( + '/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params + ) assert len(mock_cache_delete.call_args_list) == 48 - assert mock_cache_delete.call_args_list[0] == mock.call('toulouse-axel-%s-children-activities-XXX-2019' % resource.pk) - assert mock_cache_delete.call_args_list[1] == mock.call('toulouse-axel-%s-booking-data-XXX-3535-2019-09-09' % resource.pk) - assert mock_cache_delete.call_args_list[-1] == mock.call('toulouse-axel-%s-booking-data-XXX-3535-2020-07-27' % resource.pk) + assert mock_cache_delete.call_args_list[0] == mock.call( + 'toulouse-axel-%s-children-activities-XXX-2019' % resource.pk + ) + assert mock_cache_delete.call_args_list[1] == mock.call( + 'toulouse-axel-%s-booking-data-XXX-3535-2019-09-09' % resource.pk + ) + assert mock_cache_delete.call_args_list[-1] == mock.call( + 'toulouse-axel-%s-booking-data-XXX-3535-2020-07-27' % resource.pk + ) assert resp.json['err'] == 0 assert resp.json['updated'] is True assert 'data' in resp.json @@ -3832,56 +4305,51 @@ def test_clae_booking_annual_endpoint(app, resource, annual_booking_params, chil assert 'xml_response' in resp.json['data'] with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') - resp = app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) + resp = app.post_json( + '/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params + ) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert payload == { 'DATEDEMANDE': '2019-09-01', - 'ENFANT': [{ - 'IDPERSONNE': '3535', - 'REGIME': 'AV', - 'ACTIVITE': [{ - 'ANNEEREFERENCE': 
'2019', - 'IDACTIVITE': 'A19P1M1', - 'PERIODE': [ + 'ENFANT': [ + { + 'IDPERSONNE': '3535', + 'REGIME': 'AV', + 'ACTIVITE': [ { - 'DATEDEBUT': '2019-09-09', - 'DATEDFIN': '2020-07-31', - 'SEMAINETYPE': '11011' - } - ] - }, { - 'ANNEEREFERENCE': '2019', - 'IDACTIVITE': 'A19P1M2', - 'PERIODE': [ + 'ANNEEREFERENCE': '2019', + 'IDACTIVITE': 'A19P1M1', + 'PERIODE': [ + {'DATEDEBUT': '2019-09-09', 'DATEDFIN': '2020-07-31', 'SEMAINETYPE': '11011'} + ], + }, { - 'DATEDEBUT': '2019-09-09', - 'DATEDFIN': '2020-07-31', - 'SEMAINETYPE': '11011' - } - ] - }, { - 'ANNEEREFERENCE': '2019', - 'IDACTIVITE': 'A19P1M3', - 'PERIODE': [ + 'ANNEEREFERENCE': '2019', + 'IDACTIVITE': 'A19P1M2', + 'PERIODE': [ + {'DATEDEBUT': '2019-09-09', 'DATEDFIN': '2020-07-31', 'SEMAINETYPE': '11011'} + ], + }, { - 'DATEDEBUT': '2019-09-09', - 'DATEDFIN': '2020-07-31', - 'SEMAINETYPE': '10000' - } - ] - }, { - 'ANNEEREFERENCE': '2019', - 'IDACTIVITE': 'A19P1M4', - 'PERIODE': [ + 'ANNEEREFERENCE': '2019', + 'IDACTIVITE': 'A19P1M3', + 'PERIODE': [ + {'DATEDEBUT': '2019-09-09', 'DATEDFIN': '2020-07-31', 'SEMAINETYPE': '10000'} + ], + }, { - 'DATEDEBUT': '2019-09-09', - 'DATEDFIN': '2020-07-31', - 'SEMAINETYPE': '00100' - } - ] - }] - }], + 'ANNEEREFERENCE': '2019', + 'IDACTIVITE': 'A19P1M4', + 'PERIODE': [ + {'DATEDEBUT': '2019-09-09', 'DATEDFIN': '2020-07-31', 'SEMAINETYPE': '00100'} + ], + }, + ], + } + ], 'IDDUI': 'XXX', } @@ -3892,8 +4360,12 @@ def test_clae_booking_annual_endpoint(app, resource, annual_booking_params, chil new_booking_params['booking_list_SOIR'] = [] new_booking_params['booking_list_GARD'] = [] with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') - resp = app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=new_booking_params) + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', 
xml_response='' + ) + resp = app.post_json( + '/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=new_booking_params + ) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert len(payload['ENFANT']) == 1 assert payload['ENFANT'][0]['IDPERSONNE'] == '3535' @@ -3930,8 +4402,12 @@ def test_clae_booking_annual_endpoint(app, resource, annual_booking_params, chil new_booking_params['booking_list_SOIR'] = None new_booking_params['booking_list_GARD'] = None with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') - resp = app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=new_booking_params) + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) + resp = app.post_json( + '/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=new_booking_params + ) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert 'ACTIVITE' not in payload['ENFANT'][0] @@ -3954,9 +4430,13 @@ def test_clae_booking_annual_next_year(app, resource, child_activities_data): 'regime': 'AV', 'booking_date': '2020-08-01', } - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert 
payload['ENFANT'][0]['ACTIVITE'][0]['PERIODE'][0]['DATEDEBUT'] == '2020-08-01' @@ -3967,22 +4447,30 @@ def test_clae_booking_annual_next_year(app, resource, child_activities_data): def test_clae_booking_annual_regime(app, resource, annual_booking_params, child_activities_data): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') activities = child_activities_data['ENFANT'][0] - with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities): + with mock.patch( + 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities + ): annual_booking_params['regime'] = None with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert payload['ENFANT'][0]['REGIME'] == 'SV' annual_booking_params['regime'] = '' with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert payload['ENFANT'][0]['REGIME'] == 'SV' del annual_booking_params['regime'] with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: - operation.return_value = schemas.OperationResult(json_response={}, xml_request='', xml_response='') + 
operation.return_value = schemas.OperationResult( + json_response={}, xml_request='', xml_response='' + ) app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) payload = operation.call_args_list[0][0][1]['PORTAIL']['DUI'] assert payload['ENFANT'][0]['REGIME'] == 'SV' diff --git a/tests/test_toulouse_axel_schema.py b/tests/test_toulouse_axel_schema.py index 9bd7b78a..fead4fae 100644 --- a/tests/test_toulouse_axel_schema.py +++ b/tests/test_toulouse_axel_schema.py @@ -22,8 +22,8 @@ import pytest import xmlschema XSD_BASE_DIR = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - '../passerelle/contrib/toulouse_axel/xsd') + os.path.dirname(os.path.abspath(__file__)), '../passerelle/contrib/toulouse_axel/xsd' +) @pytest.mark.parametrize('date_type', ['DATEREQUIREDType', 'DATEType']) @@ -37,7 +37,9 @@ def test_date_mapping(date_type): -""".format(path=XSD_BASE_DIR, date_type=date_type) +""".format( + path=XSD_BASE_DIR, date_type=date_type + ) schema = AxelSchema(xsd, 'PORTAIL') xml_data = schema.encode({'PORTAIL': {'DATE': '2019-12-12'}}) @@ -61,28 +63,31 @@ def test_date_mapping(date_type): @pytest.mark.parametrize('bool_type', ['OUINONREQUIREDType', 'OUINONType']) -@pytest.mark.parametrize('value, expected, py_expected', [ - ('OUI', 'OUI', True), - ('oui', 'OUI', True), - ('Oui', 'OUI', True), - ('TRUE', 'OUI', True), - ('true', 'OUI', True), - ('True', 'OUI', True), - (True, 'OUI', True), - ('1', 'OUI', True), - ('NON', 'NON', False), - ('non', 'NON', False), - ('Non', 'NON', False), - ('FALSE', 'NON', False), - ('false', 'NON', False), - ('False', 'NON', False), - (False, 'NON', False), - ('0', 'NON', False), - ('FOOBAR', 'FOOBAR', None), - ('42', '42', None), - ('OUIFOOBAR', 'OUIFOOBAR', None), - ('FOONONBAR', 'FOONONBAR', None), -]) +@pytest.mark.parametrize( + 'value, expected, py_expected', + [ + ('OUI', 'OUI', True), + ('oui', 'OUI', True), + ('Oui', 'OUI', True), + ('TRUE', 'OUI', True), + ('true', 'OUI', 
True), + ('True', 'OUI', True), + (True, 'OUI', True), + ('1', 'OUI', True), + ('NON', 'NON', False), + ('non', 'NON', False), + ('Non', 'NON', False), + ('FALSE', 'NON', False), + ('false', 'NON', False), + ('False', 'NON', False), + (False, 'NON', False), + ('0', 'NON', False), + ('FOOBAR', 'FOOBAR', None), + ('42', '42', None), + ('OUIFOOBAR', 'OUIFOOBAR', None), + ('FOONONBAR', 'FOONONBAR', None), + ], +) def test_bool_mapping(bool_type, value, expected, py_expected): if expected == '' and bool_type == 'OUINONREQUIREDType': # required, can't be empty @@ -97,7 +102,9 @@ def test_bool_mapping(bool_type, value, expected, py_expected): -""".format(path=XSD_BASE_DIR, bool_type=bool_type) +""".format( + path=XSD_BASE_DIR, bool_type=bool_type + ) schema = AxelSchema(xsd, 'PORTAIL') xml_data = schema.encode({'PORTAIL': {'BOOL': value}}) diff --git a/tests/test_toulouse_axel_utils.py b/tests/test_toulouse_axel_utils.py index e14ebb7d..ffe87d66 100644 --- a/tests/test_toulouse_axel_utils.py +++ b/tests/test_toulouse_axel_utils.py @@ -40,36 +40,45 @@ def test_encode_datetime(): assert encode_datetime(parse_datetime('2019-12-12T23:40:42')) == '13/12/2019 00:40:42' -@pytest.mark.parametrize('value, expected', [ - ('0', False), - ('1', True), - ('2', None), - ('foo', False), -]) +@pytest.mark.parametrize( + 'value, expected', + [ + ('0', False), + ('1', True), + ('2', None), + ('foo', False), + ], +) def test_get_booking(value, expected): assert get_booking(value) is expected -@pytest.mark.parametrize('value, expected', [ - ('2020-01-01', 2019), - ('2020-07-31', 2019), - ('2020-08-01', 2020), - ('2020-12-31', 2020), -]) +@pytest.mark.parametrize( + 'value, expected', + [ + ('2020-01-01', 2019), + ('2020-07-31', 2019), + ('2020-08-01', 2020), + ('2020-12-31', 2020), + ], +) def test_get_reference_year_from_date(value, expected): in_date = datetime.datetime.strptime(value, json_date_format) assert get_reference_year_from_date(in_date) == expected 
-@pytest.mark.parametrize('in_value, start_value, end_value', [ - ('2020-01-06', '2020-01-06', '2020-01-10'), # monday - ('2020-01-07', '2020-01-06', '2020-01-10'), # tuesday - ('2020-01-08', '2020-01-06', '2020-01-10'), # wednesday - ('2020-01-09', '2020-01-06', '2020-01-10'), # thursday - ('2020-01-10', '2020-01-06', '2020-01-10'), # friday - ('2020-01-11', '2020-01-06', '2020-01-10'), # saturday - ('2020-01-12', '2020-01-06', '2020-01-10'), # sunday -]) +@pytest.mark.parametrize( + 'in_value, start_value, end_value', + [ + ('2020-01-06', '2020-01-06', '2020-01-10'), # monday + ('2020-01-07', '2020-01-06', '2020-01-10'), # tuesday + ('2020-01-08', '2020-01-06', '2020-01-10'), # wednesday + ('2020-01-09', '2020-01-06', '2020-01-10'), # thursday + ('2020-01-10', '2020-01-06', '2020-01-10'), # friday + ('2020-01-11', '2020-01-06', '2020-01-10'), # saturday + ('2020-01-12', '2020-01-06', '2020-01-10'), # sunday + ], +) def test_get_week_dates_from_date(in_value, start_value, end_value): in_date = datetime.datetime.strptime(in_value, json_date_format) start_date = datetime.datetime.strptime(start_value, json_date_format) diff --git a/tests/test_utils_json.py b/tests/test_utils_json.py index 83d20b1b..24a2d547 100644 --- a/tests/test_utils_json.py +++ b/tests/test_utils_json.py @@ -49,12 +49,14 @@ def test_unflatten_base(): def test_unflatten_dict(): - assert unflatten({ - 'a' + SEP + 'b' + SEP + '0': 1, - 'a' + SEP + 'c' + SEP + '1': 'a', - 'a' + SEP + 'b' + SEP + '1': True, - 'a' + SEP + 'c' + SEP + '0': [1], - }) == { + assert unflatten( + { + 'a' + SEP + 'b' + SEP + '0': 1, + 'a' + SEP + 'c' + SEP + '1': 'a', + 'a' + SEP + 'b' + SEP + '1': True, + 'a' + SEP + 'c' + SEP + '0': [1], + } + ) == { 'a': { 'b': [1, True], 'c': [[1], 'a'], @@ -63,28 +65,28 @@ def test_unflatten_dict(): def test_unflatten_array(): - assert unflatten({ - '0' + SEP + 'b' + SEP + '0': 1, - '1' + SEP + 'c' + SEP + '1': 'a', - '0' + SEP + 'b' + SEP + '1': True, - '1' + SEP + 'c' + SEP + '0': 
[1], - }) == [{'b': [1, True]}, - {'c': [[1], 'a']}] + assert ( + unflatten( + { + '0' + SEP + 'b' + SEP + '0': 1, + '1' + SEP + 'c' + SEP + '1': 'a', + '0' + SEP + 'b' + SEP + '1': True, + '1' + SEP + 'c' + SEP + '0': [1], + } + ) + == [{'b': [1, True]}, {'c': [[1], 'a']}] + ) def test_unflatten_missing_final_index(): with pytest.raises(ValueError) as exc_info: - unflatten({ - '1': 1 - }) + unflatten({'1': 1}) assert 'incomplete' in exc_info.value.args[0] def test_unflatten_missing_intermediate_index(): with pytest.raises(ValueError) as exc_info: - unflatten({ - 'a' + SEP + '1' + SEP + 'b': 1 - }) + unflatten({'a' + SEP + '1' + SEP + 'b': 1}) assert 'incomplete' in exc_info.value.args[0] @@ -104,11 +106,11 @@ def test_flatten_array_schema(): 'type': 'array', 'items': { 'type': 'integer', - } - } + }, + }, }, 'additionalProperties': False, - } + }, } flattened_schema = flatten_json_schema(schema) data = [ @@ -149,23 +151,25 @@ def test_flatten_array_schema(): def test_flatten_dict_schema(): - assert flatten_json_schema({ - 'type': 'object', - 'properties': { - 'a': { - 'type': 'string', - }, - 'b': { - 'type': 'integer', - }, - 'c': { - 'type': 'array', - 'items': { + assert flatten_json_schema( + { + 'type': 'object', + 'properties': { + 'a': { + 'type': 'string', + }, + 'b': { 'type': 'integer', - } - } + }, + 'c': { + 'type': 'array', + 'items': { + 'type': 'integer', + }, + }, + }, } - }) == { + ) == { 'type': 'object', 'description': 'flattened schema *never* use for validation', 'properties': { diff --git a/tests/test_utils_sftp.py b/tests/test_utils_sftp.py index a32f7f83..7c80f8b8 100644 --- a/tests/test_utils_sftp.py +++ b/tests/test_utils_sftp.py @@ -27,19 +27,13 @@ from passerelle.utils.sftp import SFTP, SFTPFormField, SFTPField @pytest.fixture def ssh_key(): - with open( - os.path.join( - os.path.dirname(__file__), - 'ssh_key'), 'rb') as fd: + with open(os.path.join(os.path.dirname(__file__), 'ssh_key'), 'rb') as fd: yield fd.read() @pytest.fixture def 
ssh_key_with_password(): - with open( - os.path.join( - os.path.dirname(__file__), - 'ssh_key_with_password'), 'rb') as fd: + with open(os.path.join(os.path.dirname(__file__), 'ssh_key_with_password'), 'rb') as fd: yield fd.read() @@ -55,13 +49,17 @@ def test_missing_hostname(sftpserver): def test_sftp_ok(sftpserver): with sftpserver.serve_content({'DILA': {'a.zip': 'a'}}): - with SFTP('sftp://john:doe@{server.host}:{server.port}/DILA/'.format(server=sftpserver)).client() as sftp: + with SFTP( + 'sftp://john:doe@{server.host}:{server.port}/DILA/'.format(server=sftpserver) + ).client() as sftp: assert sftp.listdir() == ['a.zip'] def test_sftp_bad_paths(sftpserver): with sftpserver.serve_content({'DILA': {'a.zip': 'a'}}): - with SFTP('sftp://john:doe@{server.host}:{server.port}/DILA/'.format(server=sftpserver)).client() as sftp: + with SFTP( + 'sftp://john:doe@{server.host}:{server.port}/DILA/'.format(server=sftpserver) + ).client() as sftp: with pytest.raises(ValueError): sftp.chdir('..') with pytest.raises(ValueError): @@ -70,7 +68,6 @@ def test_sftp_bad_paths(sftpserver): sftp.chdir('/coin') - def test_form_field(sftpserver, ssh_key, ssh_key_with_password): from django import forms @@ -114,7 +111,8 @@ def test_form_field(sftpserver, ssh_key, ssh_key_with_password): form = Form( data={'sftp_0': url}, - files={'sftp_1': SimpleUploadedFile('ssh_key', ssh_key, 'application/octet-stream')}) + files={'sftp_1': SimpleUploadedFile('ssh_key', ssh_key, 'application/octet-stream')}, + ) assert form.is_valid() sftp = form.cleaned_data['sftp'] assert isinstance(sftp, SFTP) @@ -132,11 +130,13 @@ def test_form_field(sftpserver, ssh_key, ssh_key_with_password): assert not form.is_valid() assert 'key invalid' in str(form.errors) - form = Form(data={ - 'sftp_0': url, - 'sftp_2': force_text(ssh_key_with_password, 'ascii'), - 'sftp_3': 'coucou', - }) + form = Form( + data={ + 'sftp_0': url, + 'sftp_2': force_text(ssh_key_with_password, 'ascii'), + 'sftp_3': 'coucou', + } + ) assert 
form.is_valid() with form.cleaned_data['sftp'].client() as sftp: assert sftp.listdir() == ['a.zip'] diff --git a/tests/test_utils_xml.py b/tests/test_utils_xml.py index 19669eb9..cdca4f01 100644 --- a/tests/test_utils_xml.py +++ b/tests/test_utils_xml.py @@ -30,7 +30,8 @@ def test_text_content(): def test_to_json(): - root = ET.fromstring(''' + root = ET.fromstring( + ''' 1 2 @@ -43,14 +44,15 @@ def test_to_json(): -''') +''' + ) assert to_json(root) == { 'text1': '1', 'text2': '2', 'enfants': [ {'text3': '3'}, {'text3': '4'}, - ] + ], } diff --git a/tests/test_utils_zip.py b/tests/test_utils_zip.py index e6ccad6c..4e31441d 100644 --- a/tests/test_utils_zip.py +++ b/tests/test_utils_zip.py @@ -24,7 +24,12 @@ import zipfile import pytest -from passerelle.utils.zip import ZipTemplate, ZipTemplateDoesNotExist, ZipTemplateSyntaxError, ZipTemplateError +from passerelle.utils.zip import ( + ZipTemplate, + ZipTemplateDoesNotExist, + ZipTemplateSyntaxError, + ZipTemplateError, +) @pytest.fixture @@ -52,18 +57,23 @@ def tpl_builder(templates_path): name = '%s.xml' % uuid.uuid4().hex with (templates_path / name).open('w') as fd: fd.write(content) - d['part_templates'].append({ - 'name_template': name_template, - 'template_path': name, - }) + d['part_templates'].append( + { + 'name_template': name_template, + 'template_path': name, + } + ) for name_template, content in content_parts: - d['part_templates'].append({ - 'name_template': name_template, - 'content_expression': content, - }) + d['part_templates'].append( + { + 'name_template': name_template, + 'content_expression': content, + } + ) with manifest_path.open('w') as fd: json.dump(d, fd) return '%s/%s' % (templates_path.basename, manifest_name) + return make @@ -93,22 +103,22 @@ def test_invalid(templates_path): def test_syntax_error(tpl_builder, dest): - zip_template = ZipTemplate(tpl_builder('{{ name -{{ counter }}.zip'), ctx={'name': 'coucou', 'counter': 10}) + zip_template = ZipTemplate( + tpl_builder('{{ name -{{ 
counter }}.zip'), ctx={'name': 'coucou', 'counter': 10} + ) with pytest.raises(ZipTemplateSyntaxError): zip_template.render_to_path(dest) zip_template = ZipTemplate( - tpl_builder( - '{{ name }}-{{ counter }}.zip', - template_parts=[('part1.xml', '{{ name {{ }}')]), - ctx={'name': 'coucou', 'counter': 10}) + tpl_builder('{{ name }}-{{ counter }}.zip', template_parts=[('part1.xml', '{{ name {{ }}')]), + ctx={'name': 'coucou', 'counter': 10}, + ) with pytest.raises(ZipTemplateSyntaxError): zip_template.render_to_path(dest) def test_no_parts(tpl_builder, dest): - z = ZipTemplate(tpl_builder('{{ name }}-{{ counter }}.zip'), - ctx={'name': 'coucou', 'counter': 10}) + z = ZipTemplate(tpl_builder('{{ name }}-{{ counter }}.zip'), ctx={'name': 'coucou', 'counter': 10}) z.render_to_path(dest) full_path = dest / 'coucou-10.zip' @@ -121,11 +131,13 @@ def test_with_parts(tpl_builder, dest): z = ZipTemplate( tpl_builder( '{{ name }}-{{ counter }}.zip', - template_parts=[('{{ name }}-{{ counter }}-part1.xml', - '{{ bo_dy|lower }}')], + template_parts=[ + ('{{ name }}-{{ counter }}-part1.xml', '{{ bo_dy|lower }}') + ], content_parts=[('{{ name }}-{{ counter }}-dôc.xml', 'doc-content')], ), - ctx={'name': 'coucou', 'counter': 10, 'bo_dy': 'blabla', 'doc-content': 'Héllo World!'}) + ctx={'name': 'coucou', 'counter': 10, 'bo_dy': 'blabla', 'doc-content': 'Héllo World!'}, + ) z.render_to_path(dest) for part in z.parts: str(part) @@ -134,13 +146,19 @@ def test_with_parts(tpl_builder, dest): with full_path.open('rb') as fd: with zipfile.ZipFile(fd) as zi: assert zi.namelist() == ['coucou-10-part1.xml', 'coucou-10-dôc.xml'] - assert zi.open('coucou-10-part1.xml').read().decode('utf-8') == 'blabla' + assert ( + zi.open('coucou-10-part1.xml').read().decode('utf-8') + == 'blabla' + ) assert zi.open('coucou-10-dôc.xml').read().decode('utf-8') == 'Héllo World!' 
with io.BytesIO(z.render_to_bytes()) as fd: with zipfile.ZipFile(fd) as zi: assert zi.namelist() == ['coucou-10-part1.xml', 'coucou-10-dôc.xml'] - assert zi.open('coucou-10-part1.xml').read().decode('utf-8') == 'blabla' + assert ( + zi.open('coucou-10-part1.xml').read().decode('utf-8') + == 'blabla' + ) assert zi.open('coucou-10-dôc.xml').read().decode('utf-8') == 'Héllo World!' @@ -150,7 +168,8 @@ def test_xml_error(tpl_builder, dest): 'rien.zip', content_parts=[('rien.xml', 'doc-content')], ), - ctx={'doc-content': 'Héllo World!'}) + ctx={'doc-content': 'Héllo World!'}, + ) with pytest.raises(ZipTemplateSyntaxError) as exc_info: z.render_to_bytes() assert 'XML syntax error' in exc_info.value.args[0] diff --git a/tests/test_vivaticket.py b/tests/test_vivaticket.py index 7fb72f95..26525f43 100644 --- a/tests/test_vivaticket.py +++ b/tests/test_vivaticket.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- import mock import pytest @@ -219,9 +218,11 @@ SCHOOL_LEVELS_RESPONSE = """{ @pytest.fixture def connector(db): - return utils.setup_access_rights(VivaTicket.objects.create( - slug='test', login='foo', password='bar', - url='http://example.net/vivaticket')) + return utils.setup_access_rights( + VivaTicket.objects.create( + slug='test', login='foo', password='bar', url='http://example.net/vivaticket' + ) + ) @mock.patch('passerelle.utils.Request.post') @@ -315,8 +316,9 @@ def test_get_or_create_contact(mocked_get, mocked_put, mocked_post, app, connect mocked_get.return_value = utils.FakedResponse(content='', ok=False) mocked_post.side_effect = [ utils.FakedResponse(content=KEY_RESPONSE, status_code=200), - utils.FakedResponse(content='{"InternalCode": "0000000273", "ReturnCode": 0, "Error": null}', - status_code=200), + utils.FakedResponse( + content='{"InternalCode": "0000000273", "ReturnCode": 0, "Error": null}', status_code=200 + ), ] assert connector.get_or_create_contact({'email': 'foo@example.com'}) == {'InternalCode': '0000000273'} assert 
mocked_get.call_args[1]['params']['externalCode'] == 'b48def645758b95537d4' @@ -326,8 +328,8 @@ def test_get_or_create_contact(mocked_get, mocked_put, mocked_post, app, connect assert mocked_post.call_args[1]['json']['Contact']['Email'] == 'foo@example.com' assert mocked_post.call_args[1]['json']['Contact']['ExternalCode'] == 'b48def645758b95537d4' mocked_post.return_value = utils.FakedResponse( - content='{"InternalCode": "0000000273", "ReturnCode": 0, "Error": null}', - status_code=200) + content='{"InternalCode": "0000000273", "ReturnCode": 0, "Error": null}', status_code=200 + ) mocked_get.return_value = utils.FakedResponse(content=CONTACT_RESPONSE, ok=True) mocked_put.return_value = utils.FakedResponse(content='', status_code=204) connector.get_or_create_contact({'email': 'foo@example.com'}) @@ -378,7 +380,9 @@ def test_book(mocked_get, mocked_put, mocked_post, app, connector): assert response.json['data']['bookingCode'] == 'II0000013' # no more need to fake key response because the key is in cache - mocked_post.side_effect = [utils.FakedResponse(text='This external booking code already exists.', ok=False)] + mocked_post.side_effect = [ + utils.FakedResponse(text='This external booking code already exists.', ok=False) + ] response = app.post_json(url, params=payload) assert response.json['err'] == 1 assert response.json['err_desc'] == 'This external booking code already exists.' 
diff --git a/tests/utils.py b/tests/utils.py index 503ab426..1c0f5a80 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -12,16 +12,13 @@ from passerelle.compat import json_loads def generic_endpoint_url(connector, endpoint, slug='test'): - return reverse('generic-endpoint', kwargs={ - 'connector': connector, 'slug': slug, 'endpoint': endpoint}) + return reverse('generic-endpoint', kwargs={'connector': connector, 'slug': slug, 'endpoint': endpoint}) def setup_access_rights(obj): - api = ApiUser.objects.create(username='all', - keytype='', key='') + api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(obj) - AccessRight.objects.create(codename='can_access', apiuser=api, - resource_type=obj_type, resource_pk=obj.pk) + AccessRight.objects.create(codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=obj.pk) return obj @@ -50,6 +47,7 @@ def mock_url(url=None, response='', status_code=200, headers=None, exception=Non if exception: raise exception return httmock.response(status_code, response, headers, request=request) + return httmock.HTTMock(mocked) @@ -61,8 +59,7 @@ def make_resource(model_class, **kwargs): def endpoint_get(expected_url, app, resource, endpoint, **kwargs): url = generic_endpoint_url( - connector=resource.__class__.get_connector_slug(), - endpoint=endpoint, - slug=resource.slug) + connector=resource.__class__.get_connector_slug(), endpoint=endpoint, slug=resource.slug + ) assert url == expected_url, 'endpoint URL has changed' return app.get(url, **kwargs) diff --git a/tests/wcs/conftest.py b/tests/wcs/conftest.py index 149fbce9..058090a5 100644 --- a/tests/wcs/conftest.py +++ b/tests/wcs/conftest.py @@ -128,14 +128,15 @@ class WcsHost(object): sys.path.append(os.path.dirname(WCSCTL)) try: import wcs.publisher + wcs.publisher.WcsPublisher.APP_DIR = self.wcs.app_dir - publisher = wcs.publisher.WcsPublisher.create_publisher( - register_tld_names=False) + publisher = 
wcs.publisher.WcsPublisher.create_publisher(register_tld_names=False) publisher.app_dir = self.app_dir publisher.set_config() func() except Exception as e: import traceback + pipe_in.send((e, traceback.format_exc())) pipe_in.close() # FIXME: send exception to parent @@ -206,12 +207,14 @@ class WcsHost(object): @property def api(self): from passerelle.utils import wcs + self.add_api_secret('test', 'test') return wcs.WcsApi(self.url, name_id='xxx', orig='test', key='test') @property def anonym_api(self): from passerelle.utils import wcs + self.add_api_secret('test', 'test') return wcs.WcsApi(self.url, orig='test', key='test') @@ -246,12 +249,16 @@ def wcs_factory(base_dir, wcs_class=Wcs, **kwargs): wcs_cfg_path = os.path.join(base_dir, 'wcs.cfg') with open(wcs_cfg_path, 'w') as fd: - fd.write(u'''[main] -app_dir = %s\n''' % tmp_app_dir) + fd.write( + u'''[main] +app_dir = %s\n''' + % tmp_app_dir + ) local_settings_path = os.path.join(base_dir, 'local_settings.py') with open(local_settings_path, 'w') as fd: - fd.write(u''' + fd.write( + u''' WCS_LEGACY_CONFIG_FILE = '{base_dir}/wcs.cfg' THEMES_DIRECTORY = '/' ALLOWED_HOSTS = ['*'] @@ -272,7 +279,10 @@ LOGGING = {{ }}, }}, }} -'''.format(base_dir=base_dir)) +'''.format( + base_dir=base_dir + ) + ) address = '0.0.0.0' port = find_free_tcp_port() @@ -284,7 +294,10 @@ LOGGING = {{ os.chdir(os.path.dirname(WCSCTL)) os.environ['DJANGO_SETTINGS_MODULE'] = 'wcs.settings' os.environ['WCS_SETTINGS_FILE'] = local_settings_path - os.execvp('python', ['python', 'manage.py', 'runserver', '--insecure', '--noreload', '%s:%s' % (address, port)]) + os.execvp( + 'python', + ['python', 'manage.py', 'runserver', '--insecure', '--noreload', '%s:%s' % (address, port)], + ) os._exit(0) # verify w.c.s. 
is launched @@ -371,13 +384,18 @@ class DefaultWcsHost(WcsHost): formdef.workflow_roles = {'_receiver': role.id} formdef.fields = [ fields.StringField(id='1', label='1st field', type='string', anonymise=False, varname='string'), - fields.ItemField(id='2', label='2nd field', type='item', - items=['foo', 'bar', 'baz'], varname='item'), + fields.ItemField( + id='2', label='2nd field', type='item', items=['foo', 'bar', 'baz'], varname='item' + ), fields.BoolField(id='3', label='3rd field', type='bool', varname='bool'), fields.ItemField(id='4', label='4rth field', type='item', varname='item_open'), - fields.ItemField(id='5', label='5th field', type='item', - varname='item_datasource', - data_source={'type': 'json', 'value': 'http://datasource.com/'}), + fields.ItemField( + id='5', + label='5th field', + type='item', + varname='item_datasource', + data_source={'type': 'json', 'value': 'http://datasource.com/'}, + ), ] formdef.store() @@ -387,9 +405,7 @@ class DefaultWcsHost(WcsHost): formdata = formdef.data_class()() formdata.just_created() formdata.receipt_time = datetime.datetime( - 2018, - random.randrange(1, 13), - random.randrange(1, 29) + 2018, random.randrange(1, 13), random.randrange(1, 29) ).timetuple() formdata.data = {'1': 'FOO BAR %d' % i} if i % 4 == 0: @@ -429,10 +445,11 @@ def datasource(): 'data': [ {'id': '1', 'text': 'hello'}, {'id': '2', 'text': 'world'}, - ] + ], }, 'content-type': 'application/json', } + with httmock.HTTMock(handler): yield @@ -446,5 +463,5 @@ def wcs(tmp_path_factory): @pytest.fixture def wcs_host(wcs, postgres_db, datasource): - with wcs.host('127.0.0.1', database=postgres_db.database) as wcs_host: - yield wcs_host + with wcs.host('127.0.0.1', database=postgres_db.database) as wcs_host: + yield wcs_host diff --git a/tests/wcs/test_conftest.py b/tests/wcs/test_conftest.py index a83b00e8..beb1739e 100644 --- a/tests/wcs/test_conftest.py +++ b/tests/wcs/test_conftest.py @@ -56,15 +56,14 @@ def test_wcs_api(wcs_host): } 
submitter.set('string', 'hello') submitter.set('item', 'foo') - submitter.set('item_open', { - 'id': '1', - 'text': 'world', - 'foo': 'bar' - }) - submitter.set('item_datasource', { - 'id': '2', - 'text': 'world', - }) + submitter.set('item_open', {'id': '1', 'text': 'world', 'foo': 'bar'}) + submitter.set( + 'item_datasource', + { + 'id': '2', + 'text': 'world', + }, + ) formdata = formdef.formdatas[submitter.result.id] api = wcs_host.anonym_api diff --git a/tests/wcs/test_sp_fr.py b/tests/wcs/test_sp_fr.py index da152999..b35ed1b2 100644 --- a/tests/wcs/test_sp_fr.py +++ b/tests/wcs/test_sp_fr.py @@ -51,7 +51,7 @@ def spfr(settings, wcs_host, db, sftpserver): slug='test1', description='Connecteur de test', input_sftp=SFTP('sftp://john:doe@{server.host}:{server.port}/DILA/'.format(server=sftpserver)), - output_sftp=SFTP('sftp://john:doe@{server.host}:{server.port}/DILA/'.format(server=sftpserver)) + output_sftp=SFTP('sftp://john:doe@{server.host}:{server.port}/DILA/'.format(server=sftpserver)), ) @@ -75,7 +75,9 @@ def test_import_export(spfr): serialization = spfr.export_json() spfr.delete() new_spfr = spfr.__class__.import_json(serialization) - assert (dict(spfr.__dict__, _state=None, id=None, logger=None) - == dict(new_spfr.__dict__, id=None, logger=None, _state=None)) - assert (dict(new_spfr.mappings.get().__dict__, _resource_cache=None, resource_id=None, id=None, _state=None) - == dict(mapping.__dict__, _resource_cache=None, resource_id=None, id=None, _state=None)) + assert dict(spfr.__dict__, _state=None, id=None, logger=None) == dict( + new_spfr.__dict__, id=None, logger=None, _state=None + ) + assert dict( + new_spfr.mappings.get().__dict__, _resource_cache=None, resource_id=None, id=None, _state=None + ) == dict(mapping.__dict__, _resource_cache=None, resource_id=None, id=None, _state=None)