diff --git a/functests/caluire_axel/conftest.py b/functests/caluire_axel/conftest.py index 526c2dfe..ae988520 100644 --- a/functests/caluire_axel/conftest.py +++ b/functests/caluire_axel/conftest.py @@ -2,23 +2,23 @@ import pytest def pytest_addoption(parser): - parser.addoption("--url", help="Url of a passerelle Caluire Axel connector instance") - parser.addoption("--nameid", help="Publik Name ID") - parser.addoption("--firstname", help="first name of a user") - parser.addoption("--lastname", help="Last name of a user") - parser.addoption("--family", help="Family ID") + parser.addoption('--url', help='Url of a passerelle Caluire Axel connector instance') + parser.addoption('--nameid', help='Publik Name ID') + parser.addoption('--firstname', help='first name of a user') + parser.addoption('--lastname', help='Last name of a user') + parser.addoption('--family', help='Family ID') @pytest.fixture(scope='session') def conn(request): - return request.config.getoption("--url") + return request.config.getoption('--url') @pytest.fixture(scope='session') def user(request): return { - 'name_id': request.config.getoption("--nameid"), - 'first_name': request.config.getoption("--firstname"), - 'last_name': request.config.getoption("--lastname"), - 'family': request.config.getoption("--family"), + 'name_id': request.config.getoption('--nameid'), + 'first_name': request.config.getoption('--firstname'), + 'last_name': request.config.getoption('--lastname'), + 'family': request.config.getoption('--family'), } diff --git a/functests/caluire_axel/test_caluire_axel.py b/functests/caluire_axel/test_caluire_axel.py index 80937f15..8a7ee778 100644 --- a/functests/caluire_axel/test_caluire_axel.py +++ b/functests/caluire_axel/test_caluire_axel.py @@ -12,7 +12,7 @@ def test_link(conn, user): 'NOM': user['last_name'], 'PRENOM': user['first_name'], } - print("Creating link with the following payload:") + print('Creating link with the following payload:') pprint.pprint(payload) resp = requests.post(url, json=payload) resp.raise_for_status() @@ -21,7 +21,7 @@ def test_link(conn, user): assert res['err'] == 0 print('\n') - print("GET family info") + print('GET family info') url = conn + '/family_info?NameID=%s' % name_id resp = requests.get(url) resp.raise_for_status() @@ -30,7 +30,7 @@ def test_link(conn, user): assert data['err'] == 0 print('\n') - print("GET children info") + print('GET children info') url = conn + '/children_info?NameID=%s' % (name_id) resp = requests.get(url) resp.raise_for_status() @@ -40,7 +40,7 @@ def test_link(conn, user): print('\n') for child in data['data']['MEMBRE']: - print("GET child info") + print('GET child info') url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDENT']) resp = requests.get(url) resp.raise_for_status() @@ -49,7 +49,7 @@ def test_link(conn, user): assert res['err'] == 0 print('\n') - print("and GET school info") + print('and GET school info') url = conn + '/child_schooling_info?NameID=%s&idpersonne=%s&schooling_date=%s' % ( name_id, child['IDENT'], @@ -62,7 +62,7 @@ def test_link(conn, user): assert res['err'] == 0 print('\n') - print("and GET activities info") + print('and GET activities info') url = conn + '/child_activities_info?NameID=%s&idpersonne=%s&schooling_date=%s' % ( name_id, child['IDENT'], @@ -75,7 +75,7 @@ def test_link(conn, user): assert res['err'] == 0 print('\n') - print("GET school list") + print('GET school list') url = conn + '/school_list' payload = { 'num': data['data']['RESPONSABLE1']['ADRESSE']['NORUE'], @@ -92,7 +92,7 @@ 
def test_link(conn, user): print('\n') return - print("Deleting link") + print('Deleting link') url = conn + '/unlink?NameID=%s' % name_id resp = requests.post(url) resp.raise_for_status() diff --git a/functests/cmis/conftest.py b/functests/cmis/conftest.py index 8d2d7c24..a7b8d4a2 100644 --- a/functests/cmis/conftest.py +++ b/functests/cmis/conftest.py @@ -5,25 +5,25 @@ import pytest def pytest_addoption(parser): - parser.addoption("--cmis-connector-url", help="Url of a passerelle CMIS connector instance") - parser.addoption("--cmis-endpoint", help="Url of a passerelle CMIS endpoint") - parser.addoption("--cmis-username", help="Username for the CMIS endpoint") - parser.addoption("--cmis-password", help="Password for the CMIS endpoint") - parser.addoption("--preserve-tree", action="store_true", default=False, help="Preserve test directory") + parser.addoption('--cmis-connector-url', help='Url of a passerelle CMIS connector instance') + parser.addoption('--cmis-endpoint', help='Url of a passerelle CMIS endpoint') + parser.addoption('--cmis-username', help='Username for the CMIS endpoint') + parser.addoption('--cmis-password', help='Password for the CMIS endpoint') + parser.addoption('--preserve-tree', action='store_true', default=False, help='Preserve test directory') @pytest.fixture(scope='session') def cmisclient(request): return cmislib.CmisClient( - request.config.getoption("--cmis-endpoint"), - request.config.getoption("--cmis-username"), - request.config.getoption("--cmis-password"), + request.config.getoption('--cmis-endpoint'), + request.config.getoption('--cmis-username'), + request.config.getoption('--cmis-password'), ) @pytest.fixture(scope='session') def cmis_connector(request): - return request.config.getoption("--cmis-connector-url") + return request.config.getoption('--cmis-connector-url') @pytest.fixture(scope='session') @@ -31,6 +31,6 @@ def cmis_tmpdir(cmisclient, request): path = 'test-%s' % random.randint(0, 10000) folder = cmisclient.defaultRepository.rootFolder.createFolder(path) yield folder.properties['cmis:path'] - preserve_tree = request.config.getoption("--preserve-tree") + preserve_tree = request.config.getoption('--preserve-tree') if not preserve_tree: folder.deleteTree() diff --git a/functests/cmis/tests_cmis.py b/functests/cmis/tests_cmis.py index f3cf5c04..caa0b110 100644 --- a/functests/cmis/tests_cmis.py +++ b/functests/cmis/tests_cmis.py @@ -10,7 +10,7 @@ SPECIAL_CHARS = '!#$%&+-^_`;[]{}+=' @pytest.mark.parametrize( - "path,file_name", + 'path,file_name', [ ('', 'some.file'), ('/toto', 'some.file'), @@ -31,8 +31,8 @@ def test_uploadfile(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, monkeypatch response = requests.post( url, json={ - "path": cmis_tmpdir + path, - "file": {"content": file_b64_content, "filename": file_name, "content_type": "image/jpeg"}, + 'path': cmis_tmpdir + path, + 'file': {'content': file_b64_content, 'filename': file_name, 'content_type': 'image/jpeg'}, }, ) assert response.status_code == 200 @@ -59,8 +59,8 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo response = requests.post( url, json={ - "path": cmis_tmpdir + '/uploadconflict', - "file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"}, + 'path': cmis_tmpdir + '/uploadconflict', + 'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'}, }, ) assert response.status_code == 200 @@ -70,11 +70,11 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, 
tmpdir, mo response = requests.post( url, json={ - "path": cmis_tmpdir + '/uploadconflict', - "file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"}, + 'path': cmis_tmpdir + '/uploadconflict', + 'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'}, }, ) assert response.status_code == 200 resp_data = response.json() assert resp_data['err'] == 1 - assert resp_data['err_desc'].startswith("update conflict") + assert resp_data['err_desc'].startswith('update conflict') diff --git a/functests/planitech/conftest.py b/functests/planitech/conftest.py index 6777d533..0dd4f440 100644 --- a/functests/planitech/conftest.py +++ b/functests/planitech/conftest.py @@ -2,9 +2,9 @@ import pytest def pytest_addoption(parser): - parser.addoption("--url", help="Url of a passerelle Planitech connector instance") + parser.addoption('--url', help='Url of a passerelle Planitech connector instance') @pytest.fixture(scope='session') def conn(request): - return request.config.getoption("--url") + return request.config.getoption('--url') diff --git a/functests/planitech/test_planitech.py b/functests/planitech/test_planitech.py index 56044079..6f5fe62c 100644 --- a/functests/planitech/test_planitech.py +++ b/functests/planitech/test_planitech.py @@ -113,7 +113,7 @@ def test_main(conn): def call_generic(conn, endpoint): - print("%s \n" % endpoint) + print('%s \n' % endpoint) url = conn + '/%s' % endpoint resp = requests.get(url) resp.raise_for_status() diff --git a/functests/toulouse_axel/conftest.py b/functests/toulouse_axel/conftest.py index 404e867d..225327eb 100644 --- a/functests/toulouse_axel/conftest.py +++ b/functests/toulouse_axel/conftest.py @@ -2,25 +2,25 @@ import pytest def pytest_addoption(parser): - parser.addoption("--url", help="Url of a passerelle Toulouse Axel connector instance") - parser.addoption("--nameid", help="Publik Name ID") - parser.addoption("--firstname", help="first name of a user") - parser.addoption("--lastname", help="Last name of a user") - parser.addoption("--dob", help="Date of birth of a user") - parser.addoption("--dui", help="DUI number") + parser.addoption('--url', help='Url of a passerelle Toulouse Axel connector instance') + parser.addoption('--nameid', help='Publik Name ID') + parser.addoption('--firstname', help='first name of a user') + parser.addoption('--lastname', help='Last name of a user') + parser.addoption('--dob', help='Date of birth of a user') + parser.addoption('--dui', help='DUI number') @pytest.fixture(scope='session') def conn(request): - return request.config.getoption("--url") + return request.config.getoption('--url') @pytest.fixture(scope='session') def user(request): return { - 'name_id': request.config.getoption("--nameid"), - 'first_name': request.config.getoption("--firstname"), - 'last_name': request.config.getoption("--lastname"), - 'dob': request.config.getoption("--dob"), - 'dui': request.config.getoption("--dui"), + 'name_id': request.config.getoption('--nameid'), + 'first_name': request.config.getoption('--firstname'), + 'last_name': request.config.getoption('--lastname'), + 'dob': request.config.getoption('--dob'), + 'dui': request.config.getoption('--dui'), } diff --git a/functests/toulouse_axel/test_toulouse_axel.py b/functests/toulouse_axel/test_toulouse_axel.py index 8678516c..ee51a453 100644 --- a/functests/toulouse_axel/test_toulouse_axel.py +++ b/functests/toulouse_axel/test_toulouse_axel.py @@ -4,7 +4,7 @@ import requests def test_link(conn, user): - print("Get 
update management dates") + print('Get update management dates') url = conn + '/management_dates' resp = requests.get(url) resp.raise_for_status() @@ -21,7 +21,7 @@ def test_link(conn, user): 'PRENOM': user['first_name'], 'NAISSANCE': user['dob'], } - print("Creating link with the following payload:") + print('Creating link with the following payload:') pprint.pprint(payload) resp = requests.post(url, json=payload) resp.raise_for_status() @@ -30,7 +30,7 @@ def test_link(conn, user): pprint.pprint(res) print('\n') - print("GET family info") + print('GET family info') url = conn + '/family_info?NameID=%s' % name_id resp = requests.get(url) resp.raise_for_status() @@ -158,7 +158,7 @@ def test_link(conn, user): for key in flags: payload[key] = True - print("Update family info with the following payload:") + print('Update family info with the following payload:') pprint.pprint(payload) url = conn + '/update_family_info?NameID=%s' % name_id resp = requests.post(url, json=payload) @@ -168,7 +168,7 @@ def test_link(conn, user): pprint.pprint(res) print('\n') - print("GET children info") + print('GET children info') url = conn + '/children_info?NameID=%s' % (name_id) resp = requests.get(url) resp.raise_for_status() @@ -178,7 +178,7 @@ def test_link(conn, user): print('\n') for child in data['data']['ENFANT']: - print("GET child info") + print('GET child info') url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE']) resp = requests.get(url) resp.raise_for_status() @@ -187,7 +187,7 @@ def test_link(conn, user): pprint.pprint(res) print('\n') - print("GET child contact info") + print('GET child contact info') url = conn + '/child_contacts_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE']) resp = requests.get(url) resp.raise_for_status() @@ -196,7 +196,7 @@ def test_link(conn, user): pprint.pprint(res) print('\n') - print("Deleting link") + print('Deleting link') url = conn + '/unlink?NameID=%s' % name_id resp = requests.post(url) resp.raise_for_status() diff --git a/functests/toulouse_maelis/test_00_data.py b/functests/toulouse_maelis/test_00_data.py index 7f992a50..5f101030 100644 --- a/functests/toulouse_maelis/test_00_data.py +++ b/functests/toulouse_maelis/test_00_data.py @@ -5,7 +5,7 @@ from .conftest import diff @pytest.mark.parametrize( - "ref", + 'ref', [ 'ape-indicators', 'category', diff --git a/functests/toulouse_maelis/test_01_misc.py b/functests/toulouse_maelis/test_01_misc.py index c1e63eb3..1c445bdd 100644 --- a/functests/toulouse_maelis/test_01_misc.py +++ b/functests/toulouse_maelis/test_01_misc.py @@ -36,7 +36,7 @@ def test_link(conn, update_data): res = resp.json() assert res['err'] == 1 assert res['err_class'] == 'passerelle.utils.soap.SOAPFault' - assert "E02 : Le dossier numéro [999999] ne correspond à aucune famille" in res['err_desc'] + assert 'E02 : Le dossier numéro [999999] ne correspond à aucune famille' in res['err_desc'] # wrong DUI firstname payload = { diff --git a/functests/toulouse_maelis/test_02_family.py b/functests/toulouse_maelis/test_02_family.py index 967db81c..d0686aaa 100644 --- a/functests/toulouse_maelis/test_02_family.py +++ b/functests/toulouse_maelis/test_02_family.py @@ -309,7 +309,7 @@ def test_create_rl2(conn, create_data, update_data): assert diff_rlg(conn, create_data['name_id'], 2, 'test_create_rl2.json') -@pytest.mark.parametrize("rl", ['1', '2']) +@pytest.mark.parametrize('rl', ['1', '2']) def test_update_rlg(conn, update_data, rl): rlg = 'rl' + rl RLG = 'RL' + rl @@ -370,7 +370,7 @@ def 
test_update_rlg(conn, update_data, rl): in res['err_desc'] ) else: - assert "La date de naissance ne peut pas être modifiée" in res['err_desc'] + assert 'La date de naissance ne peut pas être modifiée' in res['err_desc'] # restore RL1 payload = copy.deepcopy(update_data['family_payload'][rlg]) diff --git a/functests/toulouse_maelis/test_05_perisco.py b/functests/toulouse_maelis/test_05_perisco.py index 4b659be8..01cd40f0 100644 --- a/functests/toulouse_maelis/test_05_perisco.py +++ b/functests/toulouse_maelis/test_05_perisco.py @@ -48,7 +48,7 @@ def test_perisco_agenda(conn, create_data, perisco_subscribe_info): if booking['disabled'] is False: break else: - raise Exception("no booking available") + raise Exception('no booking available') assert booking['details']['activity_id'] == perisco_subscribe_info['activity']['id'] assert booking['details']['activity_label'] == 'Temps du midi' assert booking['prefill'] is False @@ -124,7 +124,7 @@ def test_perisco_agenda_adulte(conn, create_data2, perisco_subscribe_adulte_info if booking['disabled'] is False: break else: - raise Exception("no booking available") + raise Exception('no booking available') assert booking['details']['activity_id'] == perisco_subscribe_adulte_info['activity']['id'] assert booking['details']['activity_label'] == 'RESTAURATION ADULTE' assert booking['prefill'] is False diff --git a/functests/vivaticket/conftest.py b/functests/vivaticket/conftest.py index d50b657f..f2c99b95 100644 --- a/functests/vivaticket/conftest.py +++ b/functests/vivaticket/conftest.py @@ -2,9 +2,9 @@ import pytest def pytest_addoption(parser): - parser.addoption("--url", help="Url of a passerelle Vivaticket connector instance") + parser.addoption('--url', help='Url of a passerelle Vivaticket connector instance') @pytest.fixture(scope='session') def conn(request): - return request.config.getoption("--url") + return request.config.getoption('--url') diff --git a/functests/vivaticket/test_vivaticket.py b/functests/vivaticket/test_vivaticket.py index 7cf0842c..b18c2007 100644 --- a/functests/vivaticket/test_vivaticket.py +++ b/functests/vivaticket/test_vivaticket.py @@ -6,7 +6,7 @@ import requests def call_generic(conn, endpoint): - print("%s \n" % endpoint) + print('%s \n' % endpoint) url = conn + '/%s' % endpoint resp = requests.get(url) resp.raise_for_status() @@ -50,7 +50,7 @@ def test_book_event(conn): themes = call_generic(conn, 'themes') random.shuffle(themes) payload['theme'] = themes[0]['id'] - print("Creating booking with the following payload:\n%s" % payload) + print('Creating booking with the following payload:\n%s' % payload) resp = requests.post(url, json=payload) resp.raise_for_status() res = resp.json() diff --git a/manage.py b/manage.py index e302b33e..c3657c54 100755 --- a/manage.py +++ b/manage.py @@ -2,8 +2,8 @@ import os import sys -if __name__ == "__main__": - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passerelle.settings") +if __name__ == '__main__': + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'passerelle.settings') from django.core.management import execute_from_command_line diff --git a/passerelle/apps/adullact_pastell/models.py b/passerelle/apps/adullact_pastell/models.py index 9afa0e38..587a58d1 100644 --- a/passerelle/apps/adullact_pastell/models.py +++ b/passerelle/apps/adullact_pastell/models.py @@ -176,7 +176,7 @@ class AdullactPastell(BaseResource, HTTPResource): 'description': _('Create a document for an entity'), 'request_body': {'schema': {'application/json': DOCUMENT_CREATION_SCHEMA}}, }, - 
name="create-document", + name='create-document', parameters={ 'entity_id': {'description': _('Entity ID'), 'example_value': '42'}, }, @@ -203,7 +203,7 @@ class AdullactPastell(BaseResource, HTTPResource): 'description': _('Upload a file to a document'), 'request_body': {'schema': {'application/json': DOCUMENT_FILE_UPLOAD_SCHEMA}}, }, - name="upload-document-file", + name='upload-document-file', parameters={ 'entity_id': {'description': _('Entity ID'), 'example_value': '42'}, 'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'}, @@ -217,7 +217,7 @@ class AdullactPastell(BaseResource, HTTPResource): @endpoint( description=_('Get document\'s file'), - name="get-document-file", + name='get-document-file', parameters={ 'entity_id': {'description': _('Entity ID'), 'example_value': '42'}, 'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'}, diff --git a/passerelle/apps/api_particulier/known_errors.py b/passerelle/apps/api_particulier/known_errors.py index 3a38453c..61bd8871 100644 --- a/passerelle/apps/api_particulier/known_errors.py +++ b/passerelle/apps/api_particulier/known_errors.py @@ -42,7 +42,7 @@ KNOWN_ERRORS = { "Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. Des paramètres manquent.", ( "Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. " - "La taille du message ne doit pas être supérieure à 160 caractères." + 'La taille du message ne doit pas être supérieure à 160 caractères.' ), ( "Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. " @@ -53,7 +53,7 @@ KNOWN_ERRORS = { "Votre demande n'a pu aboutir en raison d'une erreur technique lié à l'appel au service IMC.", ( "Votre demande n’a pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. " - "Merci de renouveler votre demande ultérieurement." + 'Merci de renouveler votre demande ultérieurement.' ), }, } diff --git a/passerelle/apps/arpege_ecp/models.py b/passerelle/apps/arpege_ecp/models.py index 8f3ce9a8..377ccfcd 100644 --- a/passerelle/apps/arpege_ecp/models.py +++ b/passerelle/apps/arpege_ecp/models.py @@ -102,7 +102,7 @@ class ArpegeECP(BaseResource): except ValueError: raise APIError('No JSON content returned: %r' % response.content[:1000]) if not result.get('Data'): - raise APIError("%s (%s)" % (result.get('LibErreur'), result.get('CodErreur'))) + raise APIError('%s (%s)' % (result.get('LibErreur'), result.get('CodErreur'))) for demand in result['Data']['results']: try: data_administratives = demand['data_administratives'] diff --git a/passerelle/apps/astech/models.py b/passerelle/apps/astech/models.py index d41db8fd..603e9c8a 100644 --- a/passerelle/apps/astech/models.py +++ b/passerelle/apps/astech/models.py @@ -240,7 +240,7 @@ class ASTech(BaseResource, HTTPResource): @endpoint( name='services', - description=_("List authorized services for connected user"), + description=_('List authorized services for connected user'), display_category=_('Rules'), display_order=1, ) @@ -331,7 +331,7 @@ class ASTech(BaseResource, HTTPResource): @endpoint( name='parameter', - description=_("Value of a parameter"), + description=_('Value of a parameter'), parameters={ 'name': {'description': _('Name of the parameter'), 'example_value': 'LIBELDEMDEF'}, 'company': {'description': _('Company code. 
If absent, use "company" endpoint result')}, diff --git a/passerelle/apps/astregs/models.py b/passerelle/apps/astregs/models.py index 4a9ad1b9..4fc51472 100644 --- a/passerelle/apps/astregs/models.py +++ b/passerelle/apps/astregs/models.py @@ -28,164 +28,164 @@ from passerelle.utils.jsonresponse import APIError from passerelle.utils.validation import is_number ASSOCIATION_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "AstreGS assocation", - "description": "", - "type": "object", - "required": [ - "Financier", - "CodeFamille", - "CatTiers", - "NomEnregistrement", - "StatutTiers", - "Type", - "AdresseTitre", - "AdresseIsAdresseDeCommande", - "AdresseIsAdresseDeFacturation", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'AstreGS assocation', + 'description': '', + 'type': 'object', + 'required': [ + 'Financier', + 'CodeFamille', + 'CatTiers', + 'NomEnregistrement', + 'StatutTiers', + 'Type', + 'AdresseTitre', + 'AdresseIsAdresseDeCommande', + 'AdresseIsAdresseDeFacturation', ], - "properties": { - "Financier": {"description": "financial association", "type": "string", "enum": ["true", "false"]}, - "CodeFamille": { - "description": "association family code", - "type": "string", + 'properties': { + 'Financier': {'description': 'financial association', 'type': 'string', 'enum': ['true', 'false']}, + 'CodeFamille': { + 'description': 'association family code', + 'type': 'string', }, - "CatTiers": { - "description": "association category", - "type": "string", + 'CatTiers': { + 'description': 'association category', + 'type': 'string', }, - "NomEnregistrement": { - "description": "association name", - "type": "string", + 'NomEnregistrement': { + 'description': 'association name', + 'type': 'string', }, - "StatutTiers": { - "description": "association status", - "type": "string", - "enum": ["PROPOSE", "VALIDE", "REFUSE", "BLOQUE", "A COMPLETER"], + 'StatutTiers': { + 'description': 'association status', + 'type': 'string', + 'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'BLOQUE', 'A COMPLETER'], }, - "Type": {"description": "association type", "type": "string", "enum": ["D", "F", "*"]}, - "NumeroSiret": { - "description": "SIREN number", - "type": "string", + 'Type': {'description': 'association type', 'type': 'string', 'enum': ['D', 'F', '*']}, + 'NumeroSiret': { + 'description': 'SIREN number', + 'type': 'string', }, - "NumeroSiretFin": { - "description": "NIC number", - "type": "string", + 'NumeroSiretFin': { + 'description': 'NIC number', + 'type': 'string', }, - "AdresseTitre": { - "type": "string", + 'AdresseTitre': { + 'type': 'string', }, - "AdresseIsAdresseDeCommande": {"type": "string", "enum": ["true", "false"]}, - "AdresseIsAdresseDeFacturation": {"type": "string", "enum": ["true", "false"]}, - "organism": { - "description": _('Organisme'), - "type": "string", + 'AdresseIsAdresseDeCommande': {'type': 'string', 'enum': ['true', 'false']}, + 'AdresseIsAdresseDeFacturation': {'type': 'string', 'enum': ['true', 'false']}, + 'organism': { + 'description': _('Organisme'), + 'type': 'string', }, - "budget": { - "description": _('Budget'), - "type": "string", + 'budget': { + 'description': _('Budget'), + 'type': 'string', }, - "exercice": { - "description": _('Exercice'), - "type": "string", + 'exercice': { + 'description': _('Exercice'), + 'type': 'string', }, }, } CONTACT_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "AstreGS contact", - "description": "", - "type": "object", - "required": [ - "CodeContact", - 
"CodeTitreCivilite", - "Nom", - "AdresseDestinataire", - "CodePostal", - "Ville", - "EncodeKeyStatut", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'AstreGS contact', + 'description': '', + 'type': 'object', + 'required': [ + 'CodeContact', + 'CodeTitreCivilite', + 'Nom', + 'AdresseDestinataire', + 'CodePostal', + 'Ville', + 'EncodeKeyStatut', ], - "properties": { - "CodeContact": { - "type": "string", + 'properties': { + 'CodeContact': { + 'type': 'string', }, - "CodeTitreCivilite": { - "type": "string", + 'CodeTitreCivilite': { + 'type': 'string', }, - "Nom": { - "type": "string", + 'Nom': { + 'type': 'string', }, - "AdresseDestinataire": { - "type": "string", + 'AdresseDestinataire': { + 'type': 'string', }, - "CodePostal": { - "type": "string", + 'CodePostal': { + 'type': 'string', }, - "Ville": { - "type": "string", + 'Ville': { + 'type': 'string', }, - "EncodeKeyStatut": { - "type": "string", + 'EncodeKeyStatut': { + 'type': 'string', }, - "organism": { - "description": _('Organisme'), - "type": "string", + 'organism': { + 'description': _('Organisme'), + 'type': 'string', }, - "budget": { - "description": _('Budget'), - "type": "string", + 'budget': { + 'description': _('Budget'), + 'type': 'string', }, - "exercice": { - "description": _('Exercice'), - "type": "string", + 'exercice': { + 'description': _('Exercice'), + 'type': 'string', }, }, } DOCUMENT_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "AstreGS assocation", - "description": "", - "type": "object", - "required": [ - "Sujet", - "Entite", - "CodType", - "Type", - "hdnCodeTrt", - "EncodeKeyEntite", - "CodeDomaine", - "CodDom", - "document", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'AstreGS assocation', + 'description': '', + 'type': 'object', + 'required': [ + 'Sujet', + 'Entite', + 'CodType', + 'Type', + 'hdnCodeTrt', + 'EncodeKeyEntite', + 'CodeDomaine', + 'CodDom', + 'document', ], - "properties": { - "Sujet": { - "type": "string", + 'properties': { + 'Sujet': { + 'type': 'string', }, - "Entite": { - "type": "string", + 'Entite': { + 'type': 'string', }, - "CodType": { - "type": "string", + 'CodType': { + 'type': 'string', }, - "Type": { - "type": "string", + 'Type': { + 'type': 'string', }, - "hdnCodeTrt": { - "type": "string", + 'hdnCodeTrt': { + 'type': 'string', }, - "EncodeKeyEntite": { - "type": "string", + 'EncodeKeyEntite': { + 'type': 'string', }, - "CodeDomaine": { - "type": "string", + 'CodeDomaine': { + 'type': 'string', }, - "CodDom": { - "type": "string", + 'CodDom': { + 'type': 'string', }, - "document": { - "type": "object", - "required": ['filename', 'content_type', 'content'], + 'document': { + 'type': 'object', + 'required': ['filename', 'content_type', 'content'], 'properties': { 'filename': { 'type': 'string', @@ -198,236 +198,236 @@ DOCUMENT_SCHEMA = { }, }, }, - "organism": { - "description": _('Organisme'), - "type": "string", + 'organism': { + 'description': _('Organisme'), + 'type': 'string', }, - "budget": { - "description": _('Budget'), - "type": "string", + 'budget': { + 'description': _('Budget'), + 'type': 'string', }, - "exercice": { - "description": _('Exercice'), - "type": "string", + 'exercice': { + 'description': _('Exercice'), + 'type': 'string', }, }, } GRANT_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "AstreGS grant", - "description": "", - "type": "object", - "required": [ - "Libelle", - "LibelleCourt", - "ModGestion", - "TypeAide", - "Sens", - "CodeTiersDem", - 
"CodeServiceGestionnaire", - "CodeServiceUtilisateur", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'AstreGS grant', + 'description': '', + 'type': 'object', + 'required': [ + 'Libelle', + 'LibelleCourt', + 'ModGestion', + 'TypeAide', + 'Sens', + 'CodeTiersDem', + 'CodeServiceGestionnaire', + 'CodeServiceUtilisateur', ], - "properties": { - "Libelle": { - "type": "string", + 'properties': { + 'Libelle': { + 'type': 'string', }, - "LibelleCourt": { - "type": "string", + 'LibelleCourt': { + 'type': 'string', }, - "ModGestion": {"type": "string", "enum": ["1", "2", "3", "4"]}, - "TypeAide": { - "type": "string", + 'ModGestion': {'type': 'string', 'enum': ['1', '2', '3', '4']}, + 'TypeAide': { + 'type': 'string', }, - "Sens": { - "type": "string", + 'Sens': { + 'type': 'string', }, - "CodeTiersDem": { - "type": "string", + 'CodeTiersDem': { + 'type': 'string', }, - "CodeServiceGestionnaire": { - "type": "string", + 'CodeServiceGestionnaire': { + 'type': 'string', }, - "CodeServiceUtilisateur": { - "type": "string", + 'CodeServiceUtilisateur': { + 'type': 'string', }, - "organism": { - "description": _('Organisme'), - "type": "string", + 'organism': { + 'description': _('Organisme'), + 'type': 'string', }, - "budget": { - "description": _('Budget'), - "type": "string", + 'budget': { + 'description': _('Budget'), + 'type': 'string', }, - "exercice": { - "description": _('Exercice'), - "type": "string", + 'exercice': { + 'description': _('Exercice'), + 'type': 'string', }, }, } INDANA_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "AstreGS INDANA indicator", - "description": "", - "type": "object", - "required": ["CodeDossier", "CodeInd_1", "AnneeInd_1", "ValInd_1"], - "properties": { - "CodeDossier": { - "type": "string", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'AstreGS INDANA indicator', + 'description': '', + 'type': 'object', + 'required': ['CodeDossier', 'CodeInd_1', 'AnneeInd_1', 'ValInd_1'], + 'properties': { + 'CodeDossier': { + 'type': 'string', }, - "CodeInd_1": { - "type": "string", + 'CodeInd_1': { + 'type': 'string', }, - "AnneeInd_1": { - "type": "string", + 'AnneeInd_1': { + 'type': 'string', }, - "ValInd_1": { - "type": "string", + 'ValInd_1': { + 'type': 'string', }, - "IndAide": { - "type": "string", + 'IndAide': { + 'type': 'string', }, - "organism": { - "description": _('Organisme'), - "type": "string", + 'organism': { + 'description': _('Organisme'), + 'type': 'string', }, - "budget": { - "description": _('Budget'), - "type": "string", + 'budget': { + 'description': _('Budget'), + 'type': 'string', }, - "exercice": { - "description": _('Exercice'), - "type": "string", + 'exercice': { + 'description': _('Exercice'), + 'type': 'string', }, }, } INDANA_KEY_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "AstreGS INDANA indicator key", - "description": "", - "type": "object", - "required": ["CodeDossier", "CodeInd_1", "AnneeInd_1"], - "properties": { - "CodeDossier": { - "type": "string", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'AstreGS INDANA indicator key', + 'description': '', + 'type': 'object', + 'required': ['CodeDossier', 'CodeInd_1', 'AnneeInd_1'], + 'properties': { + 'CodeDossier': { + 'type': 'string', }, - "CodeInd_1": { - "type": "string", + 'CodeInd_1': { + 'type': 'string', }, - "AnneeInd_1": { - "type": "string", + 'AnneeInd_1': { + 'type': 'string', }, - "organism": { - "description": _('Organisme'), - "type": "string", + 
'organism': { + 'description': _('Organisme'), + 'type': 'string', }, - "budget": { - "description": _('Budget'), - "type": "string", + 'budget': { + 'description': _('Budget'), + 'type': 'string', }, - "exercice": { - "description": _('Exercice'), - "type": "string", + 'exercice': { + 'description': _('Exercice'), + 'type': 'string', }, }, } TIERS_RIB_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "AstreGS TiersRib", - "description": "TiersRib", - "type": "object", - "required": [ - "CodeTiers", - "CodePaiement", - "LibelleCourt", - "NumeroIban", - "CleIban", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'AstreGS TiersRib', + 'description': 'TiersRib', + 'type': 'object', + 'required': [ + 'CodeTiers', + 'CodePaiement', + 'LibelleCourt', + 'NumeroIban', + 'CleIban', 'CodeBic', - "CodeDomiciliation", - "CodeStatut", - "CodeDevise", - "CodeIso2Pays", - "LibelleCompteEtranger", + 'CodeDomiciliation', + 'CodeStatut', + 'CodeDevise', + 'CodeIso2Pays', + 'LibelleCompteEtranger', ], - "properties": { - "CodeDevise": {"type": "string"}, - "CodeDomiciliation": {"type": "string"}, - "CodeIso2Pays": {"type": "string"}, - "CodePaiement": {"type": "string"}, - "CodeStatut": { - "type": "string", - "enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"], + 'properties': { + 'CodeDevise': {'type': 'string'}, + 'CodeDomiciliation': {'type': 'string'}, + 'CodeIso2Pays': {'type': 'string'}, + 'CodePaiement': {'type': 'string'}, + 'CodeStatut': { + 'type': 'string', + 'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'A COMPLETER', 'BLOQUE', 'EN MODIFICATION'], }, - "CodeTiers": {"type": "string"}, - "IndicateurRibDefaut": {"type": "string"}, - "LibelleCompteEtranger": {"type": "string"}, - "LibelleCourt": {"type": "string"}, - "NumeroIban": {"type": "string"}, - "CleIban": {"type": "string"}, - "CodeBic": {"type": "string"}, - "IdRib": {"type": "string"}, - "organism": { - "description": _('Organisme'), - "type": "string", + 'CodeTiers': {'type': 'string'}, + 'IndicateurRibDefaut': {'type': 'string'}, + 'LibelleCompteEtranger': {'type': 'string'}, + 'LibelleCourt': {'type': 'string'}, + 'NumeroIban': {'type': 'string'}, + 'CleIban': {'type': 'string'}, + 'CodeBic': {'type': 'string'}, + 'IdRib': {'type': 'string'}, + 'organism': { + 'description': _('Organisme'), + 'type': 'string', }, - "budget": { - "description": _('Budget'), - "type": "string", + 'budget': { + 'description': _('Budget'), + 'type': 'string', }, - "exercice": { - "description": _('Exercice'), - "type": "string", + 'exercice': { + 'description': _('Exercice'), + 'type': 'string', }, }, } TIERS_RIB_UPDATE_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "AstreGS TiersRib", - "description": "TiersRib Update", - "type": "object", - "required": [ - "CodePaiement", - "LibelleCourt", - "NumeroIban", - "CleIban", - "CodeBic", - "CodeDomiciliation", - "CodeStatut", - "CodeDevise", - "CodeIso2Pays", - "LibelleCompteEtranger", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'AstreGS TiersRib', + 'description': 'TiersRib Update', + 'type': 'object', + 'required': [ + 'CodePaiement', + 'LibelleCourt', + 'NumeroIban', + 'CleIban', + 'CodeBic', + 'CodeDomiciliation', + 'CodeStatut', + 'CodeDevise', + 'CodeIso2Pays', + 'LibelleCompteEtranger', ], - "properties": { - "CodeDevise": {"type": "string"}, - "CodeDomiciliation": {"type": "string"}, - "CodeIso2Pays": {"type": "string"}, - "CodePaiement": {"type": "string"}, - "CodeStatut": { - 
"type": "string", - "enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"], + 'properties': { + 'CodeDevise': {'type': 'string'}, + 'CodeDomiciliation': {'type': 'string'}, + 'CodeIso2Pays': {'type': 'string'}, + 'CodePaiement': {'type': 'string'}, + 'CodeStatut': { + 'type': 'string', + 'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'A COMPLETER', 'BLOQUE', 'EN MODIFICATION'], }, - "IndicateurRibDefaut": {"type": "string"}, - "LibelleCompteEtranger": {"type": "string"}, - "LibelleCourt": {"type": "string"}, - "NumeroIban": {"type": "string"}, - "CleIban": {"type": "string"}, - "CodeBic": {"type": "string"}, - "organism": { - "description": _('Organisme'), - "type": "string", + 'IndicateurRibDefaut': {'type': 'string'}, + 'LibelleCompteEtranger': {'type': 'string'}, + 'LibelleCourt': {'type': 'string'}, + 'NumeroIban': {'type': 'string'}, + 'CleIban': {'type': 'string'}, + 'CodeBic': {'type': 'string'}, + 'organism': { + 'description': _('Organisme'), + 'type': 'string', }, - "budget": { - "description": _('Budget'), - "type": "string", + 'budget': { + 'description': _('Budget'), + 'type': 'string', }, - "exercice": { - "description": _('Exercice'), - "type": "string", + 'exercice': { + 'description': _('Exercice'), + 'type': 'string', }, }, } diff --git a/passerelle/apps/base_adresse/migrations/0019_streetmodel_resource_add.py b/passerelle/apps/base_adresse/migrations/0019_streetmodel_resource_add.py index 137147e2..b158ed76 100644 --- a/passerelle/apps/base_adresse/migrations/0019_streetmodel_resource_add.py +++ b/passerelle/apps/base_adresse/migrations/0019_streetmodel_resource_add.py @@ -6,19 +6,19 @@ from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ("base_adresse", "0018_text_to_jsonb"), + ('base_adresse', '0018_text_to_jsonb'), ] operations = [ migrations.AddField( - model_name="streetmodel", - name="resource", + model_name='streetmodel', + name='resource', field=models.ForeignKey( default=None, null=True, on_delete=django.db.models.deletion.CASCADE, - to="base_adresse.BaseAdresse", - verbose_name="BAN Connector", + to='base_adresse.BaseAdresse', + verbose_name='BAN Connector', ), ), ] diff --git a/passerelle/apps/base_adresse/migrations/0020_streetmodel_resource_runpython.py b/passerelle/apps/base_adresse/migrations/0020_streetmodel_resource_runpython.py index 959b9bbe..b187ce8c 100644 --- a/passerelle/apps/base_adresse/migrations/0020_streetmodel_resource_runpython.py +++ b/passerelle/apps/base_adresse/migrations/0020_streetmodel_resource_runpython.py @@ -4,8 +4,8 @@ from django.db import migrations def set_streetmodel_resource(apps, schema_editor): - BaseAdresse = apps.get_model("base_adresse", "BaseAdresse") - StreetModel = apps.get_model("base_adresse", "StreetModel") + BaseAdresse = apps.get_model('base_adresse', 'BaseAdresse') + StreetModel = apps.get_model('base_adresse', 'StreetModel') if BaseAdresse.objects.exists(): StreetModel.objects.update(resource=BaseAdresse.objects.first()) else: @@ -14,7 +14,7 @@ def set_streetmodel_resource(apps, schema_editor): class Migration(migrations.Migration): dependencies = [ - ("base_adresse", "0019_streetmodel_resource_add"), + ('base_adresse', '0019_streetmodel_resource_add'), ] operations = [ diff --git a/passerelle/apps/base_adresse/migrations/0021_streetmodel_resource_alter.py b/passerelle/apps/base_adresse/migrations/0021_streetmodel_resource_alter.py index fb7c7ef8..22042035 100644 --- a/passerelle/apps/base_adresse/migrations/0021_streetmodel_resource_alter.py 
+++ b/passerelle/apps/base_adresse/migrations/0021_streetmodel_resource_alter.py @@ -6,17 +6,17 @@ from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ("base_adresse", "0020_streetmodel_resource_runpython"), + ('base_adresse', '0020_streetmodel_resource_runpython'), ] operations = [ migrations.AlterField( - model_name="streetmodel", - name="resource", + model_name='streetmodel', + name='resource', field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, - to="base_adresse.BaseAdresse", - verbose_name="BAN Connector", + to='base_adresse.BaseAdresse', + verbose_name='BAN Connector', ), ), ] diff --git a/passerelle/apps/base_adresse/migrations/0023_resource_in_models_runpython.py b/passerelle/apps/base_adresse/migrations/0023_resource_in_models_runpython.py index c4357755..8a08aa2a 100644 --- a/passerelle/apps/base_adresse/migrations/0023_resource_in_models_runpython.py +++ b/passerelle/apps/base_adresse/migrations/0023_resource_in_models_runpython.py @@ -4,11 +4,11 @@ from django.db import migrations def set_resource(apps, schema_editor): - BaseAdresse = apps.get_model("base_adresse", "BaseAdresse") - RegionModel = apps.get_model("base_adresse", "RegionModel") - DepartmentModel = apps.get_model("base_adresse", "DepartmentModel") - CityModel = apps.get_model("base_adresse", "CityModel") - AddressCacheModel = apps.get_model("base_adresse", "AddressCacheModel") + BaseAdresse = apps.get_model('base_adresse', 'BaseAdresse') + RegionModel = apps.get_model('base_adresse', 'RegionModel') + DepartmentModel = apps.get_model('base_adresse', 'DepartmentModel') + CityModel = apps.get_model('base_adresse', 'CityModel') + AddressCacheModel = apps.get_model('base_adresse', 'AddressCacheModel') if BaseAdresse.objects.exists(): resource = BaseAdresse.objects.first() RegionModel.objects.update(resource=resource) diff --git a/passerelle/apps/base_adresse/models.py b/passerelle/apps/base_adresse/models.py index bbd23395..fc759455 100644 --- a/passerelle/apps/base_adresse/models.py +++ b/passerelle/apps/base_adresse/models.py @@ -326,7 +326,7 @@ class BaseAdresse(AddressResource): perm='OPEN', parameters={ 'id': {'description': _('Street identifier')}, - 'q': {'description': _("Street name")}, + 'q': {'description': _('Street name')}, 'zipcode': {'description': _('Zipcode')}, 'citycode': {'description': _('INSEE City code')}, 'page_limit': {'description': _('Maximum number of results to return'), 'example_value': 30}, @@ -383,7 +383,7 @@ class BaseAdresse(AddressResource): 'description': _('Get exactly one city using its code and postal code separated with a dot'), 'example_value': '75056.75014', }, - 'q': {'description': _("Search text in name or postal code"), 'example_value': 'Paris'}, + 'q': {'description': _('Search text in name or postal code'), 'example_value': 'Paris'}, 'code': { 'description': _('INSEE code (or multiple codes separated with commas)'), 'example_value': '75056', diff --git a/passerelle/apps/cityweb/cityweb.py b/passerelle/apps/cityweb/cityweb.py index ab13c9d2..01d205a6 100644 --- a/passerelle/apps/cityweb/cityweb.py +++ b/passerelle/apps/cityweb/cityweb.py @@ -26,33 +26,33 @@ from passerelle.utils.files import atomic_write from passerelle.utils.jsonresponse import APIError CERTIFICATE_TYPES = [ - {"id": "NAI", "text": "Naissance"}, - {"id": "MAR", "text": "Mariage"}, - {"id": "REC", "text": "Reconnaissance"}, - {"id": "DEC", "text": "Décès"}, + {'id': 'NAI', 'text': 'Naissance'}, + {'id': 'MAR', 'text': 'Mariage'}, + {'id': 
'REC', 'text': 'Reconnaissance'}, + {'id': 'DEC', 'text': 'Décès'}, ] -SEXES = [{"id": "M", "text": "Homme"}, {"id": "F", "text": "Femme"}, {"id": "NA", "text": "Autre"}] +SEXES = [{'id': 'M', 'text': 'Homme'}, {'id': 'F', 'text': 'Femme'}, {'id': 'NA', 'text': 'Autre'}] TITLES = [ - {"id": "M", "text": "Monsieur"}, - {"id": "Mme", "text": "Madame"}, - {"id": "Mlle", "text": "Mademoiselle"}, + {'id': 'M', 'text': 'Monsieur'}, + {'id': 'Mme', 'text': 'Madame'}, + {'id': 'Mlle', 'text': 'Mademoiselle'}, ] DOCUMENT_TYPES = [ - {"id": "CPI", "text": "Copie intégrale"}, - {"id": "EXTAF", "text": "Extrait avec filiation"}, - {"id": "EXTSF", "text": "Extrait sans filiation"}, - {"id": "EXTPL", "text": "Extrait plurilingue"}, + {'id': 'CPI', 'text': 'Copie intégrale'}, + {'id': 'EXTAF', 'text': 'Extrait avec filiation'}, + {'id': 'EXTSF', 'text': 'Extrait sans filiation'}, + {'id': 'EXTPL', 'text': 'Extrait plurilingue'}, ] -CONCERNED = [{"id": "reconnu", "text": "Reconnu"}, {"id": "auteur", "text": "Auteur"}] +CONCERNED = [{'id': 'reconnu', 'text': 'Reconnu'}, {'id': 'auteur', 'text': 'Auteur'}] ORIGINS = [ - {"id": "internet", "text": "Internet"}, - {"id": "guichet", "text": "Guichet"}, - {"id": "courrier", "text": "Courrier"}, + {'id': 'internet', 'text': 'Internet'}, + {'id': 'guichet', 'text': 'Guichet'}, + {'id': 'courrier', 'text': 'Courrier'}, ] diff --git a/passerelle/apps/cmis/models.py b/passerelle/apps/cmis/models.py index 7c7964d6..a43f82e5 100644 --- a/passerelle/apps/cmis/models.py +++ b/passerelle/apps/cmis/models.py @@ -129,10 +129,10 @@ class CmisConnector(BaseResource): def uploadfile(self, request, post_data): error, error_msg, data = self._validate_inputs(post_data) if error: - self.logger.debug("received invalid data: %s" % error_msg) + self.logger.debug('received invalid data: %s' % error_msg) raise APIError(error_msg, http_status=400) filename = data.get('filename') or data['file']['filename'] - self.logger.info("received file_name: '%s', file_path: '%s'", filename, data["path"]) + self.logger.info("received file_name: '%s', file_path: '%s'", filename, data['path']) with self.get_cmis_gateway() as cmis_gateway: doc = cmis_gateway.create_doc( filename, @@ -226,15 +226,15 @@ def wrap_cmis_error(f): return f(*args, **kwargs) except (urllib2.URLError, httplib2.HttpLib2Error) as e: # FIXME urllib2 still used for cmslib 0.5 compat - raise APIError("connection error: %s" % e) + raise APIError('connection error: %s' % e) except PermissionDeniedException as e: - raise APIError("permission denied: %s" % e) + raise APIError('permission denied: %s' % e) except UpdateConflictException as e: - raise APIError("update conflict: %s" % e) + raise APIError('update conflict: %s' % e) except InvalidArgumentException as e: - raise APIError("invalid property name: %s" % e) + raise APIError('invalid property name: %s' % e) except CmisException as e: - raise APIError("cmis binding error: %s" % e) + raise APIError('cmis binding error: %s' % e) return wrapper @@ -256,7 +256,7 @@ class CMISGateway: return res except ObjectNotFoundException: self._logger.debug("'%s' not found" % file_path) - basepath = "" + basepath = '' folder = self.repo.rootFolder for path_part in file_path.strip('/').split('/'): basepath += '/%s' % path_part diff --git a/passerelle/apps/cryptor/models.py b/passerelle/apps/cryptor/models.py index cfa98ada..765c3c51 100644 --- a/passerelle/apps/cryptor/models.py +++ b/passerelle/apps/cryptor/models.py @@ -37,19 +37,19 @@ from passerelle.utils.files import atomic_write from 
passerelle.utils.jsonresponse import APIError FILE_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "File to encrypt", - "description": "", - "type": "object", - "required": ["file"], - "properties": { - "file": { - "type": "object", - "required": ["filename", "content_type", "content"], - "properties": { - "filename": {"type": "string"}, - "content_type": {"type": "string"}, - "content": {"type": "string"}, + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'File to encrypt', + 'description': '', + 'type': 'object', + 'required': ['file'], + 'properties': { + 'file': { + 'type': 'object', + 'required': ['filename', 'content_type', 'content'], + 'properties': { + 'filename': {'type': 'string'}, + 'content_type': {'type': 'string'}, + 'content': {'type': 'string'}, }, } }, diff --git a/passerelle/apps/csvdatasource/models.py b/passerelle/apps/csvdatasource/models.py index 7852ac61..c98284d9 100644 --- a/passerelle/apps/csvdatasource/models.py +++ b/passerelle/apps/csvdatasource/models.py @@ -45,7 +45,7 @@ with ignore_loggers('lml', 'pyexcel_io'): from pyexcel_ods import get_data as get_data_ods from pyexcel_xls import get_data as get_data_xls -identifier_re = re.compile(r"^[^\d\W]\w*\Z", re.UNICODE) +identifier_re = re.compile(r'^[^\d\W]\w*\Z', re.UNICODE) code_cache = OrderedDict() @@ -460,14 +460,14 @@ class CsvDataSource(BaseResource): if 'id' in request.GET and not model_filters_id: # always provide a ?id= filter. - filters = ["id == %r" % force_str(request.GET['id'])] + filters = ['id == %r' % force_str(request.GET['id'])] data = [row for new_row, row in stream_expressions(filters, data, kind='filters') if new_row[0]] # allow jsonp queries by select2 # filtering is done there after projection because we need a projection named text for # retro-compatibility with previous use of the csvdatasource with select2 if 'q' in request.GET: - filters = ["%s in normalize(text.lower())" % repr(normalize(request.GET['q'].lower()))] + filters = ['%s in normalize(text.lower())' % repr(normalize(request.GET['q'].lower()))] data = [row for new_row, row in stream_expressions(filters, data, kind='filters') if new_row[0]] # force rendition of iterator as list diff --git a/passerelle/apps/esirius/models.py b/passerelle/apps/esirius/models.py index 305f347c..55a7e452 100644 --- a/passerelle/apps/esirius/models.py +++ b/passerelle/apps/esirius/models.py @@ -33,7 +33,7 @@ from passerelle.utils.jsonresponse import APIError CREATE_APPOINTMENT_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', - "type": "object", + 'type': 'object', 'properties': { 'idSys': {'type': 'string', 'pattern': '^[0-9]*$'}, 'codeRDV': {'type': 'string'}, @@ -52,7 +52,7 @@ CREATE_APPOINTMENT_SCHEMA = { 'properties': { 'idSys': {'type': 'string', 'pattern': '^[0-9]*$'}, 'personalIdentity': {'type': 'string'}, - 'additionalPersonalIdentity': {"type": "array", "items": {'type': 'string'}}, + 'additionalPersonalIdentity': {'type': 'array', 'items': {'type': 'string'}}, 'lastName': {'type': 'string'}, 'civility': {'type': 'string'}, 'firstName': {'type': 'string'}, @@ -74,7 +74,7 @@ CREATE_APPOINTMENT_SCHEMA = { }, 'serviceId': {'type': 'string'}, 'siteCode': {'type': 'string'}, - "resources": { + 'resources': { 'type': 'object', 'properties': { 'id': {'type': 'string', 'pattern': '^[0-9]*$'}, @@ -92,8 +92,8 @@ CREATE_APPOINTMENT_SCHEMA = { }, }, 'motives': { - "type": "array", - "items": { + 'type': 'array', + 'items': { 'type': 'object', 'properties': { 'id': {'type': 'string', 
'pattern': '^[0-9]*$'}, diff --git a/passerelle/apps/filr_rest/models.py b/passerelle/apps/filr_rest/models.py index 35e0d8ea..3a5e6a14 100644 --- a/passerelle/apps/filr_rest/models.py +++ b/passerelle/apps/filr_rest/models.py @@ -146,7 +146,7 @@ class Filr(BaseResource, HTTPResource): return {'data': data} @endpoint( - name="delete-folder", + name='delete-folder', methods=['post'], description=_('Delete a folder'), post={'request_body': {'schema': {'application/json': schemas.DELETE_FOLDER}}}, diff --git a/passerelle/apps/gesbac/models.py b/passerelle/apps/gesbac/models.py index 451c3d74..f0763bdf 100644 --- a/passerelle/apps/gesbac/models.py +++ b/passerelle/apps/gesbac/models.py @@ -32,156 +32,156 @@ FILES_ENCODING = 'latin-1' APPLICANT_SCHEMA = OrderedDict( ( ( - "form_id", + 'form_id', { - "type": "string", + 'type': 'string', }, ), ( - "demand_date", + 'demand_date', { - "type": "string", - "pattern": "^[0-9]{8}$", + 'type': 'string', + 'pattern': '^[0-9]{8}$', }, ), ( - "demand_time", + 'demand_time', { - "type": "string", - "pattern": "^[0-9]{6}$", + 'type': 'string', + 'pattern': '^[0-9]{6}$', }, ), ( - "producer_code", + 'producer_code', { - "type": "integer", + 'type': 'integer', }, ), - ("invariant_number", {"type": "string", "maxLength": 10, "default": ""}), + ('invariant_number', {'type': 'string', 'maxLength': 10, 'default': ''}), ( - "city_insee_code", + 'city_insee_code', { - "type": "string", + 'type': 'string', }, ), ( - "street_rivoli_code", + 'street_rivoli_code', { - "type": "string", + 'type': 'string', }, ), ( - "street_name", + 'street_name', { - "type": "string", + 'type': 'string', }, ), - ("address_complement", {"type": "string", "maxLength": 32, "default": ""}), - ("street_number", {"type": "integer", "default": 0}), - ("bis_ter", {"type": "string", "maxLength": 3, "default": ""}), - ("building", {"type": "string", "maxLength": 5, "default": ""}), - ("hall", {"type": "string", "maxLength": 5, "default": ""}), - ("appartment_number", {"type": "string", "maxLength": 5, "default": ""}), - ("producer_social_reason", {"type": "string", "maxLength": 38, "default": ""}), - ("producer_title_code", {"type": "integer", "default": 0}), - ("producer_last_name", {"type": "string", "maxLength": 38, "default": ""}), - ("producer_first_name", {"type": "string", "maxLength": 32, "default": ""}), - ("producer_phone", {"type": "string", "maxLength": 20, "default": ""}), - ("producer_email", {"type": "string", "maxLength": 50, "default": ""}), - ("owner_last_name", {"type": "string", "maxLength": 38, "default": ""}), - ("owner_first_name", {"type": "string", "maxLength": 32, "default": ""}), - ("owner_phone", {"type": "string", "maxLength": 20, "default": ""}), - ("owner_email", {"type": "string", "maxLength": 50, "default": ""}), - ("activity_code", {"type": "integer", "default": 0}), - ("family_members_number", {"type": "integer", "default": 0}), - ("houses_number", {"type": "integer", "default": 0}), - ("t1_flats_number", {"type": "integer", "default": 0}), - ("t2_flats_number", {"type": "integer", "default": 0}), - ("t3_flats_number", {"type": "integer", "default": 0}), - ("t4_flats_number", {"type": "integer", "default": 0}), - ("t5_flats_number", {"type": "integer", "default": 0}), - ("t6_flats_number", {"type": "integer", "default": 0}), - ("shops_number", {"type": "integer", "default": 0}), - ("garden_size", {"type": "integer", "default": 0}), - ("expected_date", {"type": "string", "pattern": "^[0-9]{8}$", "default": ""}), - ("expected_time", {"type": "string", "pattern": 
"^[0-9]{4}$", "default": ""}), - ("modification_code", {"type": "integer", "default": 0}), - ("demand_reason_label", {"type": "string", "default": ""}), - ("comment", {"type": "string", "maxLength": 500, "default": ""}), + ('address_complement', {'type': 'string', 'maxLength': 32, 'default': ''}), + ('street_number', {'type': 'integer', 'default': 0}), + ('bis_ter', {'type': 'string', 'maxLength': 3, 'default': ''}), + ('building', {'type': 'string', 'maxLength': 5, 'default': ''}), + ('hall', {'type': 'string', 'maxLength': 5, 'default': ''}), + ('appartment_number', {'type': 'string', 'maxLength': 5, 'default': ''}), + ('producer_social_reason', {'type': 'string', 'maxLength': 38, 'default': ''}), + ('producer_title_code', {'type': 'integer', 'default': 0}), + ('producer_last_name', {'type': 'string', 'maxLength': 38, 'default': ''}), + ('producer_first_name', {'type': 'string', 'maxLength': 32, 'default': ''}), + ('producer_phone', {'type': 'string', 'maxLength': 20, 'default': ''}), + ('producer_email', {'type': 'string', 'maxLength': 50, 'default': ''}), + ('owner_last_name', {'type': 'string', 'maxLength': 38, 'default': ''}), + ('owner_first_name', {'type': 'string', 'maxLength': 32, 'default': ''}), + ('owner_phone', {'type': 'string', 'maxLength': 20, 'default': ''}), + ('owner_email', {'type': 'string', 'maxLength': 50, 'default': ''}), + ('activity_code', {'type': 'integer', 'default': 0}), + ('family_members_number', {'type': 'integer', 'default': 0}), + ('houses_number', {'type': 'integer', 'default': 0}), + ('t1_flats_number', {'type': 'integer', 'default': 0}), + ('t2_flats_number', {'type': 'integer', 'default': 0}), + ('t3_flats_number', {'type': 'integer', 'default': 0}), + ('t4_flats_number', {'type': 'integer', 'default': 0}), + ('t5_flats_number', {'type': 'integer', 'default': 0}), + ('t6_flats_number', {'type': 'integer', 'default': 0}), + ('shops_number', {'type': 'integer', 'default': 0}), + ('garden_size', {'type': 'integer', 'default': 0}), + ('expected_date', {'type': 'string', 'pattern': '^[0-9]{8}$', 'default': ''}), + ('expected_time', {'type': 'string', 'pattern': '^[0-9]{4}$', 'default': ''}), + ('modification_code', {'type': 'integer', 'default': 0}), + ('demand_reason_label', {'type': 'string', 'default': ''}), + ('comment', {'type': 'string', 'maxLength': 500, 'default': ''}), ) ) CARD_SCHEMA = OrderedDict( ( ( - "card_subject", + 'card_subject', { - "type": "integer", + 'type': 'integer', }, ), ( - "card_type", + 'card_type', { - "type": "integer", + 'type': 'integer', }, ), ( - "card_demand_reason", + 'card_demand_reason', { - "type": "integer", + 'type': 'integer', }, ), ( - "cards_quantity", + 'cards_quantity', { - "type": "integer", + 'type': 'integer', }, ), ( - "card_number", + 'card_number', { - "type": "string", - "maxLength": 20, + 'type': 'string', + 'maxLength': 20, }, ), ( - "card_bar_code", + 'card_bar_code', { - "type": "string", - "maxLength": 20, - "default": "", + 'type': 'string', + 'maxLength': 20, + 'default': '', }, ), ( - "card_code", + 'card_code', { - "type": "string", - "maxLength": 20, - "default": "", + 'type': 'string', + 'maxLength': 20, + 'default': '', }, ), ( - "card_validity_start_date", + 'card_validity_start_date', { - "type": "string", - "pattern": "^[0-9]{8}$", - "default": "", + 'type': 'string', + 'pattern': '^[0-9]{8}$', + 'default': '', }, ), ( - "card_validity_end_date", + 'card_validity_end_date', { - "type": "string", - "pattern": "^[0-9]{8}$", - "default": "", + 'type': 'string', + 'pattern': '^[0-9]{8}$', + 
'default': '', }, ), ( - "card_comment", + 'card_comment', { - "type": "string", - "maxLength": 100, - "default": "", + 'type': 'string', + 'maxLength': 100, + 'default': '', }, ), ) @@ -191,24 +191,24 @@ DEMAND_SCHEMA = APPLICANT_SCHEMA.copy() DEMAND_SCHEMA.update(CARD_SCHEMA) SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "Gesbac", - "description": "", - "type": "object", - "required": [ - "form_id", - "demand_date", - "demand_time", - "producer_code", - "city_insee_code", - "street_rivoli_code", - "street_name", - "card_subject", - "card_type", - "card_demand_reason", - "cards_quantity", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'Gesbac', + 'description': '', + 'type': 'object', + 'required': [ + 'form_id', + 'demand_date', + 'demand_time', + 'producer_code', + 'city_insee_code', + 'street_rivoli_code', + 'street_name', + 'card_subject', + 'card_type', + 'card_demand_reason', + 'cards_quantity', ], - "properties": DEMAND_SCHEMA, + 'properties': DEMAND_SCHEMA, } diff --git a/passerelle/apps/holidays/models.py b/passerelle/apps/holidays/models.py index 41631b45..b06a2b20 100644 --- a/passerelle/apps/holidays/models.py +++ b/passerelle/apps/holidays/models.py @@ -33,9 +33,9 @@ ZONE_CHOICES = ( ) HOLIDAYS_LABELS = { - 'winter_holidays': "Vacances d’Hiver", + 'winter_holidays': 'Vacances d’Hiver', 'spring_holidays': 'Vacances de Pâques', - 'summer_holidays': "Vacances d’Été", + 'summer_holidays': 'Vacances d’Été', 'all_saints_holidays': 'Vacances de la Toussaint', 'christmas_holidays': 'Vacances de Noël', } diff --git a/passerelle/apps/litteralis/models.py b/passerelle/apps/litteralis/models.py index 1f034ca3..f1087001 100644 --- a/passerelle/apps/litteralis/models.py +++ b/passerelle/apps/litteralis/models.py @@ -36,9 +36,9 @@ def parse_datetime(datetime_str): try: obj = dateparse.parse_datetime(datetime_str) except ValueError: - raise APIError("Invalid datetime: %s" % datetime_str) + raise APIError('Invalid datetime: %s' % datetime_str) if obj is None: - raise APIError("Invalid datetime format: %s" % datetime_str) + raise APIError('Invalid datetime format: %s' % datetime_str) return obj diff --git a/passerelle/apps/mdel/mdel.py b/passerelle/apps/mdel/mdel.py index 485d4ff4..5c88407d 100644 --- a/passerelle/apps/mdel/mdel.py +++ b/passerelle/apps/mdel/mdel.py @@ -60,7 +60,7 @@ class MDELBase: class Common(MDELBase): - xmlns = "http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier" + xmlns = 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier' def __init__(self, *args, **kwargs): self.flow_type = args[0] diff --git a/passerelle/apps/mdel/models.py b/passerelle/apps/mdel/models.py index bf61a97f..24791eb4 100644 --- a/passerelle/apps/mdel/models.py +++ b/passerelle/apps/mdel/models.py @@ -43,15 +43,15 @@ STATUS_MAPPING = { } APPLICANTS = [ - {"id": "PersonneConcernee", "text": "La personne concernée par l'acte"}, - {"id": "PereMere", "text": "Son père ou sa mère"}, - {"id": "Conjoint", "text": "Son conjoint ou sa conjointe"}, - {"id": "Fils", "text": "Son fils ou sa fille"}, - {"id": "GrandPere", "text": "Son grand-père ou sa grand-mère"}, - {"id": "PetitFils", "text": "Son petit-fils ou sa petite-fille"}, - {"id": "Representant", "text": "Son représentant légal"}, - {"id": "Heritier", "text": "Son héritier"}, - {"id": "Autre", "text": "Autre"}, + {'id': 'PersonneConcernee', 'text': "La personne concernée par l'acte"}, + {'id': 'PereMere', 'text': 'Son père ou sa mère'}, + {'id': 'Conjoint', 
'text': 'Son conjoint ou sa conjointe'}, + {'id': 'Fils', 'text': 'Son fils ou sa fille'}, + {'id': 'GrandPere', 'text': 'Son grand-père ou sa grand-mère'}, + {'id': 'PetitFils', 'text': 'Son petit-fils ou sa petite-fille'}, + {'id': 'Representant', 'text': 'Son représentant légal'}, + {'id': 'Heritier', 'text': 'Son héritier'}, + {'id': 'Autre', 'text': 'Autre'}, ] CERTIFICATES = [ diff --git a/passerelle/apps/mobyt/models.py b/passerelle/apps/mobyt/models.py index cf9a96b1..c0ce24af 100644 --- a/passerelle/apps/mobyt/models.py +++ b/passerelle/apps/mobyt/models.py @@ -55,6 +55,6 @@ class MobytSMSGateway(SMSResource): r = self.requests.post(self.URL, data=params) except requests.RequestException as e: raise APIError('MobyT error: POST failed, %s' % e) - if r.text[:2] != "OK": + if r.text[:2] != 'OK': raise APIError('MobyT error: response is not "OK"') # credit consumed is unknown diff --git a/passerelle/apps/opendatasoft/models.py b/passerelle/apps/opendatasoft/models.py index 05305450..87264ebe 100644 --- a/passerelle/apps/opendatasoft/models.py +++ b/passerelle/apps/opendatasoft/models.py @@ -176,7 +176,7 @@ class Query(BaseQuery): sort = models.CharField( verbose_name=_('Sort field'), help_text=_( - "Sorts results by the specified field. A minus sign - may be used to perform an ascending sort." + 'Sorts results by the specified field. A minus sign - may be used to perform an ascending sort.' ), max_length=256, blank=True, diff --git a/passerelle/apps/opendatasoft/views.py b/passerelle/apps/opendatasoft/views.py index ca8d76ef..2548e44b 100644 --- a/passerelle/apps/opendatasoft/views.py +++ b/passerelle/apps/opendatasoft/views.py @@ -25,7 +25,7 @@ from .forms import QueryForm class QueryNew(ResourceChildViewMixin, CreateView): model = models.Query form_class = QueryForm - template_name = "passerelle/manage/resource_child_form.html" + template_name = 'passerelle/manage/resource_child_form.html' def get_form_kwargs(self): kwargs = super().get_form_kwargs() @@ -36,9 +36,9 @@ class QueryNew(ResourceChildViewMixin, CreateView): class QueryEdit(ResourceChildViewMixin, UpdateView): model = models.Query form_class = QueryForm - template_name = "passerelle/manage/resource_child_form.html" + template_name = 'passerelle/manage/resource_child_form.html' class QueryDelete(ResourceChildViewMixin, DeleteView): model = models.Query - template_name = "passerelle/manage/resource_child_confirm_delete.html" + template_name = 'passerelle/manage/resource_child_confirm_delete.html' diff --git a/passerelle/apps/opengis/models.py b/passerelle/apps/opengis/models.py index 9c72dd5d..e0f4b783 100644 --- a/passerelle/apps/opengis/models.py +++ b/passerelle/apps/opengis/models.py @@ -393,8 +393,8 @@ class OpenGIS(BaseResource): point_lon = closest_feature['geometry']['coordinates'][0] point_lat = closest_feature['geometry']['coordinates'][1] point_lon, point_lat = self.convert_coordinates(point_lon, point_lat, reverse=True) - result['lon'] = "%.6f" % point_lon - result['lat'] = "%.6f" % point_lat + result['lon'] = '%.6f' % point_lon + result['lat'] = '%.6f' % point_lat result['address'] = {} for attribute, properties in self.attributes_mapping: diff --git a/passerelle/apps/ovh/models.py b/passerelle/apps/ovh/models.py index 4922801a..235a72f8 100644 --- a/passerelle/apps/ovh/models.py +++ b/passerelle/apps/ovh/models.py @@ -144,14 +144,14 @@ class OVHSMSGateway(SMSResource): body = json.dumps(kwargs['json']) if 'json' in kwargs else '' now = str(int(time.time())) signature = hashlib.sha1() - to_sign = 
"+".join((self.application_secret, self.consumer_key, method.upper(), url, body, now)) + to_sign = '+'.join((self.application_secret, self.consumer_key, method.upper(), url, body, now)) signature.update(to_sign.encode()) headers = { 'X-Ovh-Application': self.application_key, 'X-Ovh-Consumer': self.consumer_key, 'X-Ovh-Timestamp': now, - 'X-Ovh-Signature': "$1$" + signature.hexdigest(), + 'X-Ovh-Signature': '$1$' + signature.hexdigest(), } try: diff --git a/passerelle/apps/ovh/views.py b/passerelle/apps/ovh/views.py index 13a4ad92..6d71fdc3 100644 --- a/passerelle/apps/ovh/views.py +++ b/passerelle/apps/ovh/views.py @@ -11,18 +11,18 @@ from .models import OVHSMSGateway class RequestTokenView(RedirectView): def get_redirect_url(self, *args, **kwargs): - connector = OVHSMSGateway.objects.get(slug=kwargs["slug"]) + connector = OVHSMSGateway.objects.get(slug=kwargs['slug']) request_id = uuid.uuid4() confirm_token_url = reverse('ovh-confirm-token', kwargs={'slug': connector.slug, 'uuid': request_id}) data = { - "accessRules": [ - {"method": "GET", "path": "/sms/%s/" % connector.account}, - {"method": "POST", "path": "/sms/%s/jobs/" % connector.account}, + 'accessRules': [ + {'method': 'GET', 'path': '/sms/%s/' % connector.account}, + {'method': 'POST', 'path': '/sms/%s/jobs/' % connector.account}, ], - "redirection": self.request.build_absolute_uri(confirm_token_url), + 'redirection': self.request.build_absolute_uri(confirm_token_url), } - headers = {"X-Ovh-Application": connector.application_key} + headers = {'X-Ovh-Application': connector.application_key} try: resp = connector.requests.post( @@ -49,7 +49,7 @@ class RequestTokenView(RedirectView): class ConfirmTokenView(RedirectView): def get_redirect_url(self, *args, **kwargs): - connector = OVHSMSGateway.objects.get(slug=kwargs["slug"]) + connector = OVHSMSGateway.objects.get(slug=kwargs['slug']) consumer_key = self.request.session.get('ovh-token-%s' % kwargs['uuid']) if consumer_key: diff --git a/passerelle/apps/oxyd/models.py b/passerelle/apps/oxyd/models.py index 759f29f4..497d5db4 100644 --- a/passerelle/apps/oxyd/models.py +++ b/passerelle/apps/oxyd/models.py @@ -23,8 +23,8 @@ class OxydSMSGateway(SMSResource): 'err': 1, 'err_desc': 'OXYD error: some destinations failed', 'data': [ - ['0033688888888', "OXYD error: response is not 200"], - ['0033677777777', "OXYD error: response is not 200"], + ['0033688888888', 'OXYD error: response is not 200'], + ['0033677777777', 'OXYD error: response is not 200'], ], }, }, diff --git a/passerelle/apps/plone_restapi/models.py b/passerelle/apps/plone_restapi/models.py index 1d18318f..0cb5c1ac 100644 --- a/passerelle/apps/plone_restapi/models.py +++ b/passerelle/apps/plone_restapi/models.py @@ -422,7 +422,7 @@ class Query(BaseQuery): ) order = models.BooleanField( verbose_name=_('Ascending sort order'), - help_text=_("Unset to use descending sort order"), + help_text=_('Unset to use descending sort order'), default=True, ) limit = models.PositiveIntegerField( diff --git a/passerelle/apps/plone_restapi/views.py b/passerelle/apps/plone_restapi/views.py index f5ea89ef..32ed12ff 100644 --- a/passerelle/apps/plone_restapi/views.py +++ b/passerelle/apps/plone_restapi/views.py @@ -25,7 +25,7 @@ from .forms import QueryForm class QueryNew(ResourceChildViewMixin, CreateView): model = models.Query form_class = QueryForm - template_name = "passerelle/manage/resource_child_form.html" + template_name = 'passerelle/manage/resource_child_form.html' def get_form_kwargs(self): kwargs = super().get_form_kwargs() @@ -36,9 
+36,9 @@ class QueryNew(ResourceChildViewMixin, CreateView): class QueryEdit(ResourceChildViewMixin, UpdateView): model = models.Query form_class = QueryForm - template_name = "passerelle/manage/resource_child_form.html" + template_name = 'passerelle/manage/resource_child_form.html' class QueryDelete(ResourceChildViewMixin, DeleteView): model = models.Query - template_name = "passerelle/manage/resource_child_confirm_delete.html" + template_name = 'passerelle/manage/resource_child_confirm_delete.html' diff --git a/passerelle/apps/signal_arretes/models.py b/passerelle/apps/signal_arretes/models.py index 77eef907..928c8574 100644 --- a/passerelle/apps/signal_arretes/models.py +++ b/passerelle/apps/signal_arretes/models.py @@ -134,7 +134,7 @@ class SignalArretes(BaseResource, HTTPResource): def _get_value(self, endpoint, post_data=None, request_id=None): if request_id: - url = f"{endpoint}/{request_id}" + url = f'{endpoint}/{request_id}' else: url = endpoint diff --git a/passerelle/apps/smsfactor/models.py b/passerelle/apps/smsfactor/models.py index 825cf387..b18e7b3e 100644 --- a/passerelle/apps/smsfactor/models.py +++ b/passerelle/apps/smsfactor/models.py @@ -60,48 +60,48 @@ class SMSFactorSMSGateway(SMSResource): { 'status_code': 200, 'response': { - "status": -7, - "message": "Erreur de données", - "details": "Texte du message introuvable", + 'status': -7, + 'message': 'Erreur de données', + 'details': 'Texte du message introuvable', }, 'result': { 'err': 1, 'err_desc': 'SMS Factor error: some destinations failed', 'data': [ - ['33688888888', "Texte du message introuvable"], - ['33677777777', "Texte du message introuvable"], + ['33688888888', 'Texte du message introuvable'], + ['33677777777', 'Texte du message introuvable'], ], }, }, { 'status_code': 200, 'response': { - "status": 1, - "message": "OK", - "ticket": "14672468", - "cost": 2, - "credits": 642, - "total": 2, - "sent": 2, - "blacklisted": 0, - "duplicated": 0, - "invalid": 0, - "npai": 0, + 'status': 1, + 'message': 'OK', + 'ticket': '14672468', + 'cost': 2, + 'credits': 642, + 'total': 2, + 'sent': 2, + 'blacklisted': 0, + 'duplicated': 0, + 'invalid': 0, + 'npai': 0, }, 'result': { 'err': 0, 'data': { - "status": 1, - "message": "OK", - "ticket": "14672468", - "cost": 2, - "credits": 642, - "total": 2, - "sent": 2, - "blacklisted": 0, - "duplicated": 0, - "invalid": 0, - "npai": 0, + 'status': 1, + 'message': 'OK', + 'ticket': '14672468', + 'cost': 2, + 'credits': 642, + 'total': 2, + 'sent': 2, + 'blacklisted': 0, + 'duplicated': 0, + 'invalid': 0, + 'npai': 0, }, }, }, @@ -113,8 +113,8 @@ class SMSFactorSMSGateway(SMSResource): url = urllib.parse.urljoin(self.URL, endpoint) headers = { - "Authorization": f"Bearer {self.auth_token}", - "Accept": "application/json", + 'Authorization': f'Bearer {self.auth_token}', + 'Accept': 'application/json', } try: diff --git a/passerelle/apps/twilio/models.py b/passerelle/apps/twilio/models.py index 0e8ebb41..0fd4ab09 100644 --- a/passerelle/apps/twilio/models.py +++ b/passerelle/apps/twilio/models.py @@ -45,8 +45,8 @@ class TwilioSMSGateway(SMSResource): 'err': 1, 'err_desc': 'Twilio error: some destinations failed', 'data': [ - ['+33688888888', "Twilio error: my error message"], - ['+33677777777', "Twilio error: my error message"], + ['+33688888888', 'Twilio error: my error message'], + ['+33677777777', 'Twilio error: my error message'], ], }, }, diff --git a/passerelle/apps/vivaticket/models.py b/passerelle/apps/vivaticket/models.py index 332577ae..fd401c74 100644 --- 
a/passerelle/apps/vivaticket/models.py +++ b/passerelle/apps/vivaticket/models.py @@ -25,108 +25,108 @@ from passerelle.utils.api import endpoint from passerelle.utils.jsonresponse import APIError EVENTBOOK_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "Vivaticket", - "description": "", - "type": "object", - "required": [ - "id", - "email", - "start_datetime", - "end_datetime", - "event", - "theme", - "room", - "quantity", - "form_url", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'Vivaticket', + 'description': '', + 'type': 'object', + 'required': [ + 'id', + 'email', + 'start_datetime', + 'end_datetime', + 'event', + 'theme', + 'room', + 'quantity', + 'form_url', ], - "properties": { - "id": { - "description": "formdata id", - "type": "string", + 'properties': { + 'id': { + 'description': 'formdata id', + 'type': 'string', }, - "title": { - "description": "user title", - "type": "string", + 'title': { + 'description': 'user title', + 'type': 'string', }, - "last_name": { - "description": "user last name", - "type": "string", + 'last_name': { + 'description': 'user last name', + 'type': 'string', }, - "first_name": { - "description": "user first name", - "type": "string", + 'first_name': { + 'description': 'user first name', + 'type': 'string', }, - "social_reason": { - "description": "user social reason", - "type": "string", + 'social_reason': { + 'description': 'user social reason', + 'type': 'string', }, - "address": { - "description": "user address", - "type": "string", + 'address': { + 'description': 'user address', + 'type': 'string', }, - "zipcode": { - "description": "user zipcode", - "type": "string", + 'zipcode': { + 'description': 'user zipcode', + 'type': 'string', }, - "city": { - "description": "user city", - "type": "string", + 'city': { + 'description': 'user city', + 'type': 'string', }, - "country": { - "description": "user country", - "type": "string", + 'country': { + 'description': 'user country', + 'type': 'string', }, - "phone": { - "description": "user phone", - "type": "string", + 'phone': { + 'description': 'user phone', + 'type': 'string', }, - "mobile": { - "description": "user mobile", - "type": "string", + 'mobile': { + 'description': 'user mobile', + 'type': 'string', }, - "email": { - "description": "user email", - "type": "string", + 'email': { + 'description': 'user email', + 'type': 'string', }, - "start_datetime": { - "description": "event start datetime", - "type": "string", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}$", + 'start_datetime': { + 'description': 'event start datetime', + 'type': 'string', + 'pattern': '^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}$', }, - "end_datetime": { - "description": "event end datetime", - "type": "string", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}$", + 'end_datetime': { + 'description': 'event end datetime', + 'type': 'string', + 'pattern': '^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}$', }, - "event": { - "description": "event id", - "type": "string", + 'event': { + 'description': 'event id', + 'type': 'string', }, - "theme": { - "description": "theme id", - "type": "string", + 'theme': { + 'description': 'theme id', + 'type': 'string', }, - "room": { - "description": "room id", - "type": "string", + 'room': { + 'description': 'room id', + 'type': 'string', }, - "quantity": {"description": "quantity", "type": "string", "pattern": "^[0-9]+$"}, - "booking_comment": { - "description": "booking comment", - "type": "string", + 
'quantity': {'description': 'quantity', 'type': 'string', 'pattern': '^[0-9]+$'}, + 'booking_comment': { + 'description': 'booking comment', + 'type': 'string', }, - "room_comment": { - "description": "room comment", - "type": "string", + 'room_comment': { + 'description': 'room comment', + 'type': 'string', }, - "form_url": { - "description": "form url", - "type": "string", + 'form_url': { + 'description': 'form url', + 'type': 'string', }, - "school_level": { - "description": "School Level code", - "type": "string", + 'school_level': { + 'description': 'School Level code', + 'type': 'string', }, }, } @@ -192,7 +192,7 @@ class VivaTicket(BaseResource): response = self.get(endpoint, **kwargs) json = response.json() data = [] - for setting in json.get("ListOfSettings", []): + for setting in json.get('ListOfSettings', []): data.append({'id': setting['Code'], 'text': setting['Label']}) return {'data': data} diff --git a/passerelle/base/management/commands/ensure_jsonb.py b/passerelle/base/management/commands/ensure_jsonb.py index a606bf5d..fb3c00f4 100644 --- a/passerelle/base/management/commands/ensure_jsonb.py +++ b/passerelle/base/management/commands/ensure_jsonb.py @@ -38,7 +38,7 @@ class Command(BaseCommand): 'ALTER COLUMN "%(column_name)s" TYPE jsonb USING "%(column_name)s"::jsonb' ) params = { - "schema_name": line[0], + 'schema_name': line[0], 'table_name': table_name, 'column_name': column_name, } diff --git a/passerelle/base/models.py b/passerelle/base/models.py index 70f33022..1e74ed2a 100644 --- a/passerelle/base/models.py +++ b/passerelle/base/models.py @@ -169,7 +169,7 @@ class BaseResource(models.Model): category = ( self.category if not hasattr(self.category, '_proxy____args') else self.category._proxy____args[0] ) - return "%s %s" % (slugify(category), self._meta.model_name) + return '%s %s' % (slugify(category), self._meta.model_name) def is_accessible_by(self, request): if request.user.is_superuser: @@ -264,7 +264,7 @@ class BaseResource(models.Model): hide_fields.extend(self.hide_description_fields) for field in self._meta.fields: if ( - field.name.endswith(("key", "password", "secret", "keystore", "token", "username")) + field.name.endswith(('key', 'password', 'secret', 'keystore', 'token', 'username')) or field.name in hide_fields ): continue @@ -982,7 +982,7 @@ class ProxyLogger: continue if not parsed.username and not parsed.password: continue - replaced = parsed._replace(netloc="{}:{}@{}".format('***', '***', parsed.hostname)) + replaced = parsed._replace(netloc='{}:{}@{}'.format('***', '***', parsed.hostname)) message = message.replace(url, replaced.geturl()) levelno = getattr(logging, levelname) @@ -1031,7 +1031,7 @@ class ProxyLogger: if getattr(request, 'META', None): if 'x-forwarded-for' in request.headers: - sourceip = request.headers.get('X-Forwarded-For', '').split(",")[0].strip() + sourceip = request.headers.get('X-Forwarded-For', '').split(',')[0].strip() else: sourceip = request.META.get('REMOTE_ADDR') else: diff --git a/passerelle/contrib/caluire_axel/models.py b/passerelle/contrib/caluire_axel/models.py index 6e10d82a..41597a5a 100644 --- a/passerelle/contrib/caluire_axel/models.py +++ b/passerelle/contrib/caluire_axel/models.py @@ -201,7 +201,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Family account'), display_order=4, - description=_("Get information about children"), + description=_('Get information about children'), parameters={ 'NameID': {'description': _('Publik ID')}, }, @@ -214,7 +214,7 @@ class CaluireAxel(BaseResource): 
@endpoint( display_category=_('Family account'), display_order=5, - description=_("Get information about a child"), + description=_('Get information about a child'), parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, @@ -298,7 +298,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=1, - description=_("Get school list"), + description=_('Get school list'), parameters={ 'num': {'description': _('Address: number')}, 'street': {'description': _('Address: street')}, @@ -348,7 +348,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=2, - description=_("Get information about schooling of a child"), + description=_('Get information about schooling of a child'), parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, @@ -427,7 +427,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=3, - description=_("Get information about activities of a child for the year"), + description=_('Get information about activities of a child for the year'), parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, @@ -477,7 +477,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=4, - description=_("Register a child for an activity"), + description=_('Register a child for an activity'), parameters={ 'NameID': {'description': _('Publik ID')}, }, @@ -662,7 +662,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=5, - description=_("Get agenda for an activity and a child"), + description=_('Get agenda for an activity and a child'), parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, @@ -743,7 +743,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=6, - description=_("Get periscolaire agenda for a child"), + description=_('Get periscolaire agenda for a child'), parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, @@ -757,7 +757,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=7, - description=_("Get full agenda for a child"), + description=_('Get full agenda for a child'), parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, @@ -844,7 +844,7 @@ class CaluireAxel(BaseResource): continue if activity_id not in exclusive_activity_ids: continue - exclusive_activities["%s:%s" % (day, self.get_activity_type(activity_id))].add(booking) + exclusive_activities['%s:%s' % (day, self.get_activity_type(activity_id))].add(booking) # build list of existing booked days legacy_exclusive_activities = defaultdict(set) for activity_id in exclusive_activity_ids: @@ -857,7 +857,7 @@ class CaluireAxel(BaseResource): continue if activity_id not in exclusive_activity_ids: continue - legacy_exclusive_activities["%s:%s" % (day, self.get_activity_type(activity_id))].add( + legacy_exclusive_activities['%s:%s' % (day, self.get_activity_type(activity_id))].add( booking['id'] ) # check booking exclusivity for changes only @@ -923,7 +923,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=8, - description=_("Set agenda for a child"), + description=_('Set agenda for a child'), parameters={ 'NameID': {'description': 
_('Publik ID')}, }, @@ -962,7 +962,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=9, - description=_("Set agenda for a child, from changes applied to another child"), + description=_('Set agenda for a child, from changes applied to another child'), parameters={ 'NameID': {'description': _('Publik ID')}, }, @@ -1040,7 +1040,7 @@ class CaluireAxel(BaseResource): @endpoint( display_category=_('Schooling'), display_order=10, - description=_("Set activity agenda for a child with a typical week"), + description=_('Set activity agenda for a child with a typical week'), parameters={ 'NameID': {'description': _('Publik ID')}, }, @@ -1178,7 +1178,7 @@ class CaluireAxel(BaseResource): name='regie', pattern=r'^(?P[\w-]+)/invoices/?$', example_pattern='{regie_id}/invoices', - description=_("Get invoices to pay"), + description=_('Get invoices to pay'), parameters={ 'NameID': {'description': _('Publik ID')}, 'regie_id': {'description': _('Regie identifier'), 'example_value': 'ENF'}, @@ -1195,7 +1195,7 @@ class CaluireAxel(BaseResource): name='regie', pattern=r'^(?P[\w-]+)/invoices/history/?$', example_pattern='{regie_id}/invoices/history', - description=_("Get invoices already paid"), + description=_('Get invoices already paid'), parameters={ 'NameID': {'description': _('Publik ID')}, 'regie_id': {'description': _('Regie identifier'), 'example_value': 'ENF'}, diff --git a/passerelle/contrib/dpark/models.py b/passerelle/contrib/dpark/models.py index 5d013a43..eedba93a 100644 --- a/passerelle/contrib/dpark/models.py +++ b/passerelle/contrib/dpark/models.py @@ -541,7 +541,7 @@ class DPark(BaseResource): value_errors = is_erroneous(value, ('filename', 'content'), silent=True) if value_errors: - value_errors = ["<%s> " % key + error for error in value_errors] + value_errors = ['<%s> ' % key + error for error in value_errors] errors.extend(value_errors) continue diff --git a/passerelle/contrib/fake_family/default_database.py b/passerelle/contrib/fake_family/default_database.py index d9cf0b40..4d6041cd 100644 --- a/passerelle/contrib/fake_family/default_database.py +++ b/passerelle/contrib/fake_family/default_database.py @@ -33,16 +33,16 @@ def default_database(): if i > 5: amount = total_amount else: - amount = "0.00" # paid - invoices["F%d%0.2d-%d" % (day.year, day.month, random.randint(100, 999))] = { - "amount": amount, - "label": "facture du %d/%d/%d" % (day.day, day.month, day.year), - "total_amount": total_amount, - "online_payment": i % 3 != 0, - "created": day.strftime('%Y-%m-%d'), - "pay_limit_date": limit.strftime('%Y-%m-%d'), - "has_pdf": True, - "keywords": [], + amount = '0.00' # paid + invoices['F%d%0.2d-%d' % (day.year, day.month, random.randint(100, 999))] = { + 'amount': amount, + 'label': 'facture du %d/%d/%d' % (day.day, day.month, day.year), + 'total_amount': total_amount, + 'online_payment': i % 3 != 0, + 'created': day.strftime('%Y-%m-%d'), + 'pay_limit_date': limit.strftime('%Y-%m-%d'), + 'has_pdf': True, + 'keywords': [], } adults = {} @@ -53,7 +53,7 @@ def default_database(): adult['password'] = 'pass%d' % i adult['email'] = 'p%d@example.net' % i adult['text'] = '%(first_name)s %(last_name)s' % adult - adult['birthdate'] = "%d-%0.2d-18" % (now.year - random.randint(20, 40), random.randint(1, 12)) + adult['birthdate'] = '%d-%0.2d-18' % (now.year - random.randint(20, 40), random.randint(1, 12)) adult['phone'] = '0122334455' adult['cellphone'] = '0655443322' adult['invoices'] = [] @@ -64,7 +64,7 @@ def default_database(): 
adult2.update(randomnames.person('adult')) adult2['last_name'] = adult['last_name'] adult2['text'] = '%(first_name)s %(last_name)s' % adult2 - adult2['birthdate'] = "%d-%0.2d-18" % (now.year - random.randint(20, 40), random.randint(1, 12)) + adult2['birthdate'] = '%d-%0.2d-18' % (now.year - random.randint(20, 40), random.randint(1, 12)) adult2['login'] = 'p%d@example.net' % (i + 1) adult2['password'] = 'pass%d' % (i + 1) adult2['email'] = 'p%d@example.net' % (i + 1) @@ -75,59 +75,59 @@ def default_database(): for i in range(1, 51): child = randomnames.person('child') child['text'] = '%(first_name)s %(last_name)s' % child - child['birthdate'] = "%d-%0.2d-18" % (now.year - random.randint(1, 14), random.randint(1, 12)) - child['keywords'] = ["naissance-en-" + child['birthdate'][:4]] + child['birthdate'] = '%d-%0.2d-18' % (now.year - random.randint(1, 14), random.randint(1, 12)) + child['keywords'] = ['naissance-en-' + child['birthdate'][:4]] child['id'] = i children['%d' % i] = child database = { - "links": { + 'links': { # "nameid": "adult_id", - "fake_nameid": "1", + 'fake_nameid': '1', }, - "adults": adults, - "children": children, - "invoices": invoices, - "families": { - "1": { - "id": "1", - "adults": ["1", "2"], - "children": ["1", "2"], - "contacts": ["10"], - "invoices": [], - "keywords": ["deux-enfants", "habitant-14eme"], + 'adults': adults, + 'children': children, + 'invoices': invoices, + 'families': { + '1': { + 'id': '1', + 'adults': ['1', '2'], + 'children': ['1', '2'], + 'contacts': ['10'], + 'invoices': [], + 'keywords': ['deux-enfants', 'habitant-14eme'], }, - "2": { - "id": "2", - "adults": ["3"], - "children": ["3", "4"], - "contacts": [], - "invoices": [], - "keywords": ["deux-enfants", "habitant-14eme", "mono-parentale"], + '2': { + 'id': '2', + 'adults': ['3'], + 'children': ['3', '4'], + 'contacts': [], + 'invoices': [], + 'keywords': ['deux-enfants', 'habitant-14eme', 'mono-parentale'], }, - "3": { - "id": "3", - "adults": ["5", "6"], - "children": ["5"], - "contacts": [], - "invoices": [], - "keywords": ["un-enfant", "habitant-14eme"], + '3': { + 'id': '3', + 'adults': ['5', '6'], + 'children': ['5'], + 'contacts': [], + 'invoices': [], + 'keywords': ['un-enfant', 'habitant-14eme'], }, - "4": { - "id": "4", - "adults": ["7"], - "children": ["6"], - "contacts": [], - "invoices": [], - "keywords": ["un-enfant", "habitant-14eme", "mono-parentale"], + '4': { + 'id': '4', + 'adults': ['7'], + 'children': ['6'], + 'contacts': [], + 'invoices': [], + 'keywords': ['un-enfant', 'habitant-14eme', 'mono-parentale'], }, - "5": { - "id": "5", - "adults": ["9", "10"], - "children": [], - "contacts": ["15"], - "invoices": [], - "keywords": ["sans-enfant", "habitant-14eme"], + '5': { + 'id': '5', + 'adults': ['9', '10'], + 'children': [], + 'contacts': ['15'], + 'invoices': [], + 'keywords': ['sans-enfant', 'habitant-14eme'], }, }, } diff --git a/passerelle/contrib/greco/models.py b/passerelle/contrib/greco/models.py index a6231c7e..0057fb51 100644 --- a/passerelle/contrib/greco/models.py +++ b/passerelle/contrib/greco/models.py @@ -128,10 +128,10 @@ class Greco(BaseResource): HttpAuthenticated.__init__(self) def send(self, request): - request.message = request.message.replace(b"contentType", b"xm:contentType") + request.message = request.message.replace(b'contentType', b'xm:contentType') if self.attachments: # SOAP Attachement format - message = MIMEMultipart('related', type="text/xml", start="") + message = MIMEMultipart('related', type='text/xml', start='') xml = MIMEText(None, 
_subtype='xml', _charset='utf-8') xml.add_header('Content-ID', '') # do not base64-encode the soap message diff --git a/passerelle/contrib/grenoble_gru/models.py b/passerelle/contrib/grenoble_gru/models.py index 5adff4ad..6103a407 100644 --- a/passerelle/contrib/grenoble_gru/models.py +++ b/passerelle/contrib/grenoble_gru/models.py @@ -63,12 +63,12 @@ def check_value(data, field_name, values): def strip_emoji(value): emoji_pattern = re.compile( - "[" - "\U00002700-\U000027BF" # Dingbats - "\U0001F300-\U0001F5FF" # symbols & pictographs - "\U0001F600-\U0001F64F" # emoticons - "\U0001F680-\U0001F6FF" # transport & map symbols - "]+", + '[' + '\U00002700-\U000027BF' # Dingbats + '\U0001F300-\U0001F5FF' # symbols & pictographs + '\U0001F600-\U0001F64F' # emoticons + '\U0001F680-\U0001F6FF' # transport & map symbols + ']+', ) return emoji_pattern.sub(r'', value) @@ -83,7 +83,7 @@ class GrenobleGRU(BaseResource): category = _('Business Process Connectors') class Meta: - verbose_name = "Grenoble - Gestion des signalements" + verbose_name = 'Grenoble - Gestion des signalements' @classmethod def get_verbose_name(cls): diff --git a/passerelle/contrib/isere_ens/models.py b/passerelle/contrib/isere_ens/models.py index 5ee03ec8..9d39705d 100644 --- a/passerelle/contrib/isere_ens/models.py +++ b/passerelle/contrib/isere_ens/models.py @@ -36,133 +36,133 @@ API_VERSION = [ API_VERSION_DEFAULT = '1.0.0' SITE_BOOKING_SCHOOL_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "ENS site/booking/school", - "description": "", - "type": "object", - "required": [ - "site", - "date", - "pmr", - "morning", - "lunch", - "afternoon", - "participants", - "grade_levels", - "beneficiary_first_name", - "beneficiary_last_name", - "beneficiary_email", - "beneficiary_phone", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'ENS site/booking/school', + 'description': '', + 'type': 'object', + 'required': [ + 'site', + 'date', + 'pmr', + 'morning', + 'lunch', + 'afternoon', + 'participants', + 'grade_levels', + 'beneficiary_first_name', + 'beneficiary_last_name', + 'beneficiary_email', + 'beneficiary_phone', ], - "properties": OrderedDict( + 'properties': OrderedDict( { - "external_id": { - "description": "external id", - "type": "string", + 'external_id': { + 'description': 'external id', + 'type': 'string', }, - "site": { - "description": "site id (code)", - "type": "string", + 'site': { + 'description': 'site id (code)', + 'type': 'string', }, - "project": { - "description": "project code", - "type": "string", + 'project': { + 'description': 'project code', + 'type': 'string', }, - "date": { - "description": "booking date (format: YYYY-MM-DD)", - "type": "string", + 'date': { + 'description': 'booking date (format: YYYY-MM-DD)', + 'type': 'string', }, - "pmr": { - "description": "PMR", - "type": "boolean", + 'pmr': { + 'description': 'PMR', + 'type': 'boolean', }, - "morning": { - "description": "morning booking", - "type": "boolean", + 'morning': { + 'description': 'morning booking', + 'type': 'boolean', }, - "lunch": { - "description": "lunch booking", - "type": "boolean", + 'lunch': { + 'description': 'lunch booking', + 'type': 'boolean', }, - "afternoon": { - "description": "afternoon booking", - "type": "boolean", + 'afternoon': { + 'description': 'afternoon booking', + 'type': 'boolean', }, - "participants": { - "description": "number of participants", - "type": "string", - "pattern": "^[0-9]+$", + 'participants': { + 'description': 'number of participants', + 'type': 
'string', + 'pattern': '^[0-9]+$', }, - "animator": { - "description": "animator id", - "type": "string", - "pattern": "^[0-9]*$", + 'animator': { + 'description': 'animator id', + 'type': 'string', + 'pattern': '^[0-9]*$', }, - "group": { - "description": "school group id (API v2.1.0/v2.1.1, use applicant if empty)", - "type": "string", - "pattern": "^[0-9]*$", + 'group': { + 'description': 'school group id (API v2.1.0/v2.1.1, use applicant if empty)', + 'type': 'string', + 'pattern': '^[0-9]*$', }, - "applicant": { - "description": "applicant", - "type": "string", + 'applicant': { + 'description': 'applicant', + 'type': 'string', }, - "grade_levels": { - "description": "grade levels", - "type": "array", - "items": { - "type": "string", - "description": "level", + 'grade_levels': { + 'description': 'grade levels', + 'type': 'array', + 'items': { + 'type': 'string', + 'description': 'level', }, }, - "beneficiary_first_name": { - "description": "beneficiary first name", - "type": "string", + 'beneficiary_first_name': { + 'description': 'beneficiary first name', + 'type': 'string', }, - "beneficiary_last_name": { - "description": "beneficiary last name", - "type": "string", + 'beneficiary_last_name': { + 'description': 'beneficiary last name', + 'type': 'string', }, - "beneficiary_email": { - "description": "beneficiary email", - "type": "string", + 'beneficiary_email': { + 'description': 'beneficiary email', + 'type': 'string', }, - "beneficiary_phone": { - "description": "beneficiary phone number", - "type": "string", + 'beneficiary_phone': { + 'description': 'beneficiary phone number', + 'type': 'string', }, - "beneficiary_cellphone": { - "description": "beneficiary cell phone number", - "type": "string", + 'beneficiary_cellphone': { + 'description': 'beneficiary cell phone number', + 'type': 'string', }, # v1.0.0 only - "code": { - "description": "booking code (API v1.0.0)", - "type": "string", + 'code': { + 'description': 'booking code (API v1.0.0)', + 'type': 'string', }, - "status": { - "description": "booking status (API v1.0.0)", - "type": "string", + 'status': { + 'description': 'booking status (API v1.0.0)', + 'type': 'string', }, - "beneficiary_id": { - "description": "beneficiary id (API v1.0.0)", - "type": "string", + 'beneficiary_id': { + 'description': 'beneficiary id (API v1.0.0)', + 'type': 'string', }, - "public": { - "description": "public (API v1.0.0)", - "type": "string", + 'public': { + 'description': 'public (API v1.0.0)', + 'type': 'string', }, - "entity_id": { - "description": "entity/school id (UAI/RNE) (API v1.0.0)", - "type": "string", + 'entity_id': { + 'description': 'entity/school id (UAI/RNE) (API v1.0.0)', + 'type': 'string', }, - "entity_name": { - "description": "entity/school name (API v1.0.0)", - "type": "string", + 'entity_name': { + 'description': 'entity/school name (API v1.0.0)', + 'type': 'string', }, - "entity_type": { - "description": "entity/school type (API v1.0.0)", - "type": "string", + 'entity_type': { + 'description': 'entity/school type (API v1.0.0)', + 'type': 'string', }, } ), @@ -170,15 +170,15 @@ SITE_BOOKING_SCHOOL_SCHEMA = { class IsereENS(BaseResource, HTTPResource): - category = _("Business Process Connectors") + category = _('Business Process Connectors') base_url = models.URLField( - verbose_name=_("Webservice Base URL"), - help_text=_("Base API URL (before /api/...)"), + verbose_name=_('Webservice Base URL'), + help_text=_('Base API URL (before /api/...)'), ) - token = models.CharField(verbose_name=_("Access token"), 
max_length=128) + token = models.CharField(verbose_name=_('Access token'), max_length=128) api_version = models.CharField( - verbose_name=_("API version"), max_length=10, choices=API_VERSION, default=API_VERSION_DEFAULT + verbose_name=_('API version'), max_length=10, choices=API_VERSION, default=API_VERSION_DEFAULT ) class Meta: @@ -186,7 +186,7 @@ class IsereENS(BaseResource, HTTPResource): def request(self, endpoint, params=None, json=None, method='get'): url = urlparse.urljoin(self.base_url, endpoint) - headers = {"token": self.token} + headers = {'token': self.token} if method == 'post' or json is not None: response = self.requests.post(url, params=params, json=json, headers=headers) else: @@ -198,169 +198,169 @@ class IsereENS(BaseResource, HTTPResource): except ValueError: json_content = None raise APIError( - "error status:%s %r, content:%r" + 'error status:%s %r, content:%r' % (response.status_code, response.reason, response.content[:1024]), data={ - "status_code": response.status_code, - "json_content": json_content, + 'status_code': response.status_code, + 'json_content': json_content, }, ) if response.status_code == 204: # 204 No Content - raise APIError("abnormal empty response") + raise APIError('abnormal empty response') try: return response.json() except ValueError: - raise APIError("invalid JSON in response: %r" % response.content[:1024]) + raise APIError('invalid JSON in response: %r' % response.content[:1024]) @endpoint( - name="sites", - description=_("Sites"), + name='sites', + description=_('Sites'), display_order=1, parameters={ - "q": {"description": _("Search text in name field")}, - "id": { - "description": _("Returns site with code=id"), + 'q': {'description': _('Search text in name field')}, + 'id': { + 'description': _('Returns site with code=id'), }, - "kind": { - "description": _("Returns only sites of this kind (school_group or social)"), + 'kind': { + 'description': _('Returns only sites of this kind (school_group or social)'), }, }, ) def sites(self, request, q=None, id=None, kind=None): if id is not None: - site = self.request("api/%s/site/%s" % (self.api_version, id)) - site["id"] = site["code"] - site["text"] = "%(name)s (%(city)s)" % site + site = self.request('api/%s/site/%s' % (self.api_version, id)) + site['id'] = site['code'] + site['text'] = '%(name)s (%(city)s)' % site sites = [site] else: - cache_key = "isere-ens-sites-%d" % self.id + cache_key = 'isere-ens-sites-%d' % self.id sites = cache.get(cache_key) if not sites: - sites = self.request("api/%s/site" % self.api_version) + sites = self.request('api/%s/site' % self.api_version) for site in sites: - site["id"] = site["code"] - site["text"] = "%(name)s (%(city)s)" % site + site['id'] = site['code'] + site['text'] = '%(name)s (%(city)s)' % site cache.set(cache_key, sites, 300) if kind is not None: sites = [site for site in sites if site.get(kind)] if q is not None: q = simplify(q) - sites = [site for site in sites if q in simplify(site["text"])] - return {"data": sites} + sites = [site for site in sites if q in simplify(site['text'])] + return {'data': sites} @endpoint( - name="animators", - description=_("Animators"), + name='animators', + description=_('Animators'), display_order=2, parameters={ - "q": {"description": _("Search text in name field")}, - "id": { - "description": _("Returns animator number id"), + 'q': {'description': _('Search text in name field')}, + 'id': { + 'description': _('Returns animator number id'), }, }, ) def animators(self, request, q=None, id=None): - cache_key = 
"isere-ens-animators-%d" % self.id + cache_key = 'isere-ens-animators-%d' % self.id animators = cache.get(cache_key) if not animators: - animators = self.request("api/%s/schoolAnimator" % self.api_version) + animators = self.request('api/%s/schoolAnimator' % self.api_version) for animator in animators: - animator["id"] = str(animator["id"]) - animator["text"] = "%(first_name)s %(last_name)s <%(email)s> (%(entity)s)" % animator + animator['id'] = str(animator['id']) + animator['text'] = '%(first_name)s %(last_name)s <%(email)s> (%(entity)s)' % animator cache.set(cache_key, animators, 300) if id is not None: - animators = [animator for animator in animators if animator["id"] == id] + animators = [animator for animator in animators if animator['id'] == id] if q is not None: q = simplify(q) - animators = [animator for animator in animators if q in simplify(animator["text"])] - return {"data": animators} + animators = [animator for animator in animators if q in simplify(animator['text'])] + return {'data': animators} @endpoint( - name="site-calendar", - description=_("Available bookings for a site"), + name='site-calendar', + description=_('Available bookings for a site'), display_order=3, parameters={ - "site": {"description": _("Site code (aka id)")}, - "participants": { - "description": _("Number of participants"), + 'site': {'description': _('Site code (aka id)')}, + 'participants': { + 'description': _('Number of participants'), }, - "start_date": { - "description": _("First date of the calendar (format: YYYY-MM-DD, default: today)"), + 'start_date': { + 'description': _('First date of the calendar (format: YYYY-MM-DD, default: today)'), }, - "end_date": { - "description": _( - "Last date of the calendar (format: YYYY-MM-DD, default: start_date + 92 days)" + 'end_date': { + 'description': _( + 'Last date of the calendar (format: YYYY-MM-DD, default: start_date + 92 days)' ), }, }, ) - def site_calendar(self, request, site, participants="1", start_date=None, end_date=None): + def site_calendar(self, request, site, participants='1', start_date=None, end_date=None): if start_date: try: - start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d").date() + start_date = datetime.datetime.strptime(start_date, '%Y-%m-%d').date() except ValueError: raise APIError( - "bad start_date format (%s), should be YYYY-MM-DD" % start_date, + 'bad start_date format (%s), should be YYYY-MM-DD' % start_date, http_status=400, ) else: start_date = datetime.date.today() if end_date: try: - end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d").date() + end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d').date() except ValueError: raise APIError( - "bad end_date format (%s), should be YYYY-MM-DD" % end_date, + 'bad end_date format (%s), should be YYYY-MM-DD' % end_date, http_status=400, ) else: end_date = start_date + datetime.timedelta(days=92) params = { - "participants": participants, - "start_date": start_date.strftime("%Y-%m-%d"), - "end_date": end_date.strftime("%Y-%m-%d"), + 'participants': participants, + 'start_date': start_date.strftime('%Y-%m-%d'), + 'end_date': end_date.strftime('%Y-%m-%d'), } - dates = self.request("api/%s/site/%s/calendar" % (self.api_version, site), params=params) + dates = self.request('api/%s/site/%s/calendar' % (self.api_version, site), params=params) def status_name(status): return { - "AVAILABLE": _("available"), - "COMPLETE": _("complete"), - "OVERBOOKING": _("overbooking"), - "OPEN": _("open"), - "CLOSE": _("closed"), - }.get(status) or _("unknown") + 
'AVAILABLE': _('available'), + 'COMPLETE': _('complete'), + 'OVERBOOKING': _('overbooking'), + 'OPEN': _('open'), + 'CLOSE': _('closed'), + }.get(status) or _('unknown') for date in dates: - date["id"] = site + ":" + date["date"] - date["site"] = site - date_ = datetime.datetime.strptime(date["date"], "%Y-%m-%d").date() - date["date_format"] = date_format(date_, format="DATE_FORMAT") - date["date_number"] = date_format(date_, format="d") - date["date_weekday"] = date_format(date_, format="l") - date["date_weekdayindex"] = date_format(date_, format="w") - date["date_weeknumber"] = date_format(date_, format="W") - date["date_month"] = date_format(date_, format="F Y") - date["disabled"] = False - date["status"] = "open" - for period in ("morning", "lunch", "afternoon"): - date["%s_status" % period] = status_name(date[period]) - for period in ("morning", "afternoon"): - if date[period] in ("COMPLETE", "CLOSE"): - if date["status"] == "partially-open": - date["disabled"] = True - date["status"] = "closed" + date['id'] = site + ':' + date['date'] + date['site'] = site + date_ = datetime.datetime.strptime(date['date'], '%Y-%m-%d').date() + date['date_format'] = date_format(date_, format='DATE_FORMAT') + date['date_number'] = date_format(date_, format='d') + date['date_weekday'] = date_format(date_, format='l') + date['date_weekdayindex'] = date_format(date_, format='w') + date['date_weeknumber'] = date_format(date_, format='W') + date['date_month'] = date_format(date_, format='F Y') + date['disabled'] = False + date['status'] = 'open' + for period in ('morning', 'lunch', 'afternoon'): + date['%s_status' % period] = status_name(date[period]) + for period in ('morning', 'afternoon'): + if date[period] in ('COMPLETE', 'CLOSE'): + if date['status'] == 'partially-open': + date['disabled'] = True + date['status'] = 'closed' else: - date["status"] = "partially-open" - date["details"] = ( - _("Morning (%(morning_status)s), Lunch (%(lunch_status)s), Afternoon (%(afternoon_status)s)") + date['status'] = 'partially-open' + date['details'] = ( + _('Morning (%(morning_status)s), Lunch (%(lunch_status)s), Afternoon (%(afternoon_status)s)') % date ) - date["text"] = "%(date_format)s - %(details)s" % date - return {"data": dates} + date['text'] = '%(date_format)s - %(details)s' % date + return {'data': dates} def site_booking_v1(self, request, post_data): for key in ( @@ -377,56 +377,56 @@ class IsereENS(BaseResource, HTTPResource): if key not in post_data: raise APIError('%s is mandatory (API v1.0.0)' % key, err_code='bad-request', http_status=400) payload = { - "code": post_data["code"], - "status": post_data["status"], - "beneficiary": { - "id": post_data["beneficiary_id"], - "firstName": post_data["beneficiary_first_name"], - "lastName": post_data["beneficiary_last_name"], - "email": post_data["beneficiary_email"], - "phone": post_data["beneficiary_phone"], - "cellphone": post_data.get("beneficiary_cellphone", ""), + 'code': post_data['code'], + 'status': post_data['status'], + 'beneficiary': { + 'id': post_data['beneficiary_id'], + 'firstName': post_data['beneficiary_first_name'], + 'lastName': post_data['beneficiary_last_name'], + 'email': post_data['beneficiary_email'], + 'phone': post_data['beneficiary_phone'], + 'cellphone': post_data.get('beneficiary_cellphone', ''), }, - "entity": { - "id": post_data["entity_id"], - "name": post_data["entity_name"], - "type": post_data["entity_type"], + 'entity': { + 'id': post_data['entity_id'], + 'name': post_data['entity_name'], + 'type': post_data['entity_type'], 
}, - "booking": { - "projectCode": post_data.get("project"), - "siteCode": post_data["site"], - "applicant": post_data["applicant"], - "public": post_data["public"], - "bookingDate": post_data["date"], - "participants": int(post_data["participants"]), - "morning": post_data["morning"], - "lunch": post_data["lunch"], - "afternoon": post_data["afternoon"], - "pmr": post_data["pmr"], - "gradeLevels": post_data["grade_levels"], + 'booking': { + 'projectCode': post_data.get('project'), + 'siteCode': post_data['site'], + 'applicant': post_data['applicant'], + 'public': post_data['public'], + 'bookingDate': post_data['date'], + 'participants': int(post_data['participants']), + 'morning': post_data['morning'], + 'lunch': post_data['lunch'], + 'afternoon': post_data['afternoon'], + 'pmr': post_data['pmr'], + 'gradeLevels': post_data['grade_levels'], }, } - if post_data.get("animator"): - payload["booking"]["schoolAnimator"] = int(post_data["animator"]) + if post_data.get('animator'): + payload['booking']['schoolAnimator'] = int(post_data['animator']) - booking = self.request("api/1.0.0/booking", json=payload) + booking = self.request('api/1.0.0/booking', json=payload) if not isinstance(booking, dict): - raise APIError("response is not a dict", data=booking) - if "status" not in booking: - raise APIError("no status in response", data=booking) - if booking["status"] not in ("BOOKING", "OVERBOOKING"): - raise APIError("booking status is %s" % booking["status"], data=booking) - return {"data": booking} + raise APIError('response is not a dict', data=booking) + if 'status' not in booking: + raise APIError('no status in response', data=booking) + if booking['status'] not in ('BOOKING', 'OVERBOOKING'): + raise APIError('booking status is %s' % booking['status'], data=booking) + return {'data': booking} @endpoint( - name="site-booking", - description=_("Book a site for a school"), + name='site-booking', + description=_('Book a site for a school'), display_order=4, - methods=["post"], + methods=['post'], post={ - "request_body": { - "schema": { - "application/json": SITE_BOOKING_SCHOOL_SCHEMA, + 'request_body': { + 'schema': { + 'application/json': SITE_BOOKING_SCHOOL_SCHEMA, } } }, @@ -435,20 +435,20 @@ class IsereENS(BaseResource, HTTPResource): if self.api_version == '1.0.0': return self.site_booking_v1(request, post_data) payload = { - "siteCode": post_data["site"], - "bookingDate": post_data["date"], - "pmr": post_data["pmr"], - "morning": post_data["morning"], - "lunch": post_data["lunch"], - "afternoon": post_data["afternoon"], - "participants": int(post_data["participants"]), - "gradeLevels": post_data["grade_levels"], - "beneficiary": { - "firstName": post_data["beneficiary_first_name"], - "lastName": post_data["beneficiary_last_name"], - "email": post_data["beneficiary_email"], - "phone": post_data["beneficiary_phone"], - "cellphone": post_data.get("beneficiary_cellphone", ""), + 'siteCode': post_data['site'], + 'bookingDate': post_data['date'], + 'pmr': post_data['pmr'], + 'morning': post_data['morning'], + 'lunch': post_data['lunch'], + 'afternoon': post_data['afternoon'], + 'participants': int(post_data['participants']), + 'gradeLevels': post_data['grade_levels'], + 'beneficiary': { + 'firstName': post_data['beneficiary_first_name'], + 'lastName': post_data['beneficiary_last_name'], + 'email': post_data['beneficiary_email'], + 'phone': post_data['beneficiary_phone'], + 'cellphone': post_data.get('beneficiary_cellphone', ''), }, } if post_data.get('group'): @@ -471,19 +471,19 @@ class 
IsereENS(BaseResource, HTTPResource): booking = self.request('api/' + self.api_version + '/site/booking/school', json=payload) if not isinstance(booking, dict): - raise APIError("response is not a dict", data=booking) - if "status" not in booking: - raise APIError("no status in response", data=booking) - if booking["status"] not in ("BOOKING", "OVERBOOKING"): - raise APIError("booking status is %s" % booking["status"], data=booking) - return {"data": booking} + raise APIError('response is not a dict', data=booking) + if 'status' not in booking: + raise APIError('no status in response', data=booking) + if booking['status'] not in ('BOOKING', 'OVERBOOKING'): + raise APIError('booking status is %s' % booking['status'], data=booking) + return {'data': booking} @endpoint( - name="get-site-booking", - description=_("Booking status"), + name='get-site-booking', + description=_('Booking status'), display_order=5, parameters={ - "code": {"description": _('Booking Code (API v1.0.0) or External ID (API v2.1.0/v2.1.1)')}, + 'code': {'description': _('Booking Code (API v1.0.0) or External ID (API v2.1.0/v2.1.1)')}, }, ) def get_site_booking(self, request, code): @@ -498,12 +498,12 @@ class IsereENS(BaseResource, HTTPResource): return {'data': status} @endpoint( - name="cancel-site-booking", - description=_("Cancel a booking"), - methods=["post"], + name='cancel-site-booking', + description=_('Cancel a booking'), + methods=['post'], display_order=6, parameters={ - "code": {"description": _('External ID')}, + 'code': {'description': _('External ID')}, }, ) def cancel_booking(self, request, code): diff --git a/passerelle/contrib/iws/models.py b/passerelle/contrib/iws/models.py index b3317e07..b5d48b1c 100644 --- a/passerelle/contrib/iws/models.py +++ b/passerelle/contrib/iws/models.py @@ -27,53 +27,53 @@ from passerelle.utils.api import endpoint from passerelle.utils.conversion import any2bool from passerelle.utils.jsonresponse import APIError -TYPE_AGENDA = {"DECHET": "DECHETS VERTS", "ENCOMBRANT": "ENCOMBRANTS"} -C_TYPEPB = {"DECHET": "8006", "ENCOMBRANT": "8008"} -SYNDIC_C_TYPEPB = {"DECHET": "8007", "ENCOMBRANT": "8009"} +TYPE_AGENDA = {'DECHET': 'DECHETS VERTS', 'ENCOMBRANT': 'ENCOMBRANTS'} +C_TYPEPB = {'DECHET': '8006', 'ENCOMBRANT': '8008'} +SYNDIC_C_TYPEPB = {'DECHET': '8007', 'ENCOMBRANT': '8009'} NS = '{http://isilog.fr}' BOOKDATE_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "IWS", - "description": "", - "type": "object", - "required": ["firstname", "lastname", "email_notif", "date", "token"], - "properties": { - "firstname": { - "description": "Firstname", - "type": "string", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'IWS', + 'description': '', + 'type': 'object', + 'required': ['firstname', 'lastname', 'email_notif', 'date', 'token'], + 'properties': { + 'firstname': { + 'description': 'Firstname', + 'type': 'string', }, - "lastname": { - "description": "Lastname", - "type": "string", + 'lastname': { + 'description': 'Lastname', + 'type': 'string', }, - "email": { - "description": "Email", - "type": "string", + 'email': { + 'description': 'Email', + 'type': 'string', }, - "email_notif": { - "description": "Email notification", - "type": "boolean", + 'email_notif': { + 'description': 'Email notification', + 'type': 'boolean', }, - "description": { - "description": "Description of the request", - "type": "string", + 'description': { + 'description': 'Description of the request', + 'type': 'string', }, - "tel_number": { - "description": 
"Telephone number", - "type": "string", + 'tel_number': { + 'description': 'Telephone number', + 'type': 'string', }, - "date": { - "description": "Booking date", - "type": "string", + 'date': { + 'description': 'Booking date', + 'type': 'string', }, - "token": { - "description": "Booking token", - "type": "string", + 'token': { + 'description': 'Booking token', + 'type': 'string', }, - "sms": { - "description": "Send sms to user before the booked date", + 'sms': { + 'description': 'Send sms to user before the booked date', }, }, } @@ -103,7 +103,7 @@ class IWSConnector(BaseResource): client.set_default_soapheaders([header_value]) service = client.create_service('%sIsiHelpDeskServiceSoap' % NS, self.operation_endpoint) - self.logger.debug("calling %s method of iws", method, extra={'data': iws_data}) + self.logger.debug('calling %s method of iws', method, extra={'data': iws_data}) IsiWsEntity = client.get_type('%sIsiWsEntity' % NS) ArrayOfIsiWsDataField = client.get_type('%sArrayOfIsiWsDataField' % NS) IsiWsDataField = client.get_type('%sIsiWsDataField' % NS) @@ -123,13 +123,13 @@ class IWSConnector(BaseResource): try: tree = lxml.etree.fromstring(iws_res.encode('utf-8'), parser).getroottree() except lxml.etree.XMLSyntaxError: - raise APIError("IWS response is not valid") - result = {"status": tree.find('//Statut').text, "trace": tree.find('//Trace').text} + raise APIError('IWS response is not valid') + result = {'status': tree.find('//Statut').text, 'trace': tree.find('//Trace').text} fields = {} for data_field in tree.xpath('//IsiWsDataField'): fields[data_field.find('IsiField').text] = data_field.find('IsiValue').text result['fields'] = fields - self.logger.debug("recieved data from %s method of iws", method, extra={'data': result}) + self.logger.debug('recieved data from %s method of iws', method, extra={'data': result}) return result def _check_status(self, iws_res): @@ -168,7 +168,7 @@ class IWSConnector(BaseResource): 'I_AP_COMMUNE': city, 'I_AP_COMMUNEINTER': sti_code, 'J_PRJACTPREV': '5', - 'C_EQUIPE': "VPVIGIE", + 'C_EQUIPE': 'VPVIGIE', 'I_APP_DEMANDEUR': 'booking, demandeur', 'I_AP_ADRESSEMAIL': 'booking@localhost', 'C_TYPEPB': SYNDIC_C_TYPEPB[request_type] if syndic else C_TYPEPB[request_type], @@ -189,7 +189,7 @@ class IWSConnector(BaseResource): raw_date = raw_date.strip() date_obj = datetime.strptime(raw_date, '%d/%m/%Y').date() date_text = dateformat.format(date_obj, 'l d F Y') - dates.append({"id": raw_date, "text": date_text, "token": token}) + dates.append({'id': raw_date, 'text': date_text, 'token': token}) return result @endpoint( diff --git a/passerelle/contrib/lille_urban_card/models.py b/passerelle/contrib/lille_urban_card/models.py index a84ac19e..24480f67 100644 --- a/passerelle/contrib/lille_urban_card/models.py +++ b/passerelle/contrib/lille_urban_card/models.py @@ -65,19 +65,19 @@ class LilleUrbanCard(BaseResource): def csp(self, request, *args, **kwargs): return { 'data': [ - {'id': '2', 'text': "Commerçant·e, chef·ffe d’entreprise"}, - {'id': '3', 'text': "Cadre, profession libérale ou intellectuel·le"}, - {'id': '4', 'text': "Profession intermédiaire"}, - {'id': '5', 'text': "Employé·e"}, - {'id': '6', 'text': "Ouvrier·e"}, - {'id': '1', 'text': "Agriculteur·rice"}, - {'id': '8', 'text': "Sans profession"}, - {'id': '81', 'text': "Demandeur·se d’emploi"}, - {'id': '82', 'text': "Enfant de 0 à 11 ans"}, - {'id': '83', 'text': "Enfant de plus de 12 ans"}, - {'id': '84', 'text': "Étudiant·e"}, - {'id': '7', 'text': "Retraité·e"}, - {'id': '99', 'text': "Ne 
souhaite pas se prononcer"}, + {'id': '2', 'text': 'Commerçant·e, chef·ffe d’entreprise'}, + {'id': '3', 'text': 'Cadre, profession libérale ou intellectuel·le'}, + {'id': '4', 'text': 'Profession intermédiaire'}, + {'id': '5', 'text': 'Employé·e'}, + {'id': '6', 'text': 'Ouvrier·e'}, + {'id': '1', 'text': 'Agriculteur·rice'}, + {'id': '8', 'text': 'Sans profession'}, + {'id': '81', 'text': 'Demandeur·se d’emploi'}, + {'id': '82', 'text': 'Enfant de 0 à 11 ans'}, + {'id': '83', 'text': 'Enfant de plus de 12 ans'}, + {'id': '84', 'text': 'Étudiant·e'}, + {'id': '7', 'text': 'Retraité·e'}, + {'id': '99', 'text': 'Ne souhaite pas se prononcer'}, ] } diff --git a/passerelle/contrib/planitech/models.py b/passerelle/contrib/planitech/models.py index 51dc8d71..298fd480 100644 --- a/passerelle/contrib/planitech/models.py +++ b/passerelle/contrib/planitech/models.py @@ -40,169 +40,169 @@ DEFAULT_MAX_CAPACITY = 100000 CREATE_RESERVATION_SCHEMA = { - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "Planitech createreservation", - "description": "", - "type": "object", - "required": [ - "date", - "start_time", - "end_time", - "place_id", - "price", - "name_id", - "first_name", - "last_name", - "email", - "activity_id", - "object", - "type_id", - "vat_rate", + '$schema': 'http://json-schema.org/draft-04/schema#', + 'title': 'Planitech createreservation', + 'description': '', + 'type': 'object', + 'required': [ + 'date', + 'start_time', + 'end_time', + 'place_id', + 'price', + 'name_id', + 'first_name', + 'last_name', + 'email', + 'activity_id', + 'object', + 'type_id', + 'vat_rate', ], - "properties": { - "date": { - "description": "Date", - "type": "string", + 'properties': { + 'date': { + 'description': 'Date', + 'type': 'string', }, - "start_time": { - "description": "Start time", - "type": "string", + 'start_time': { + 'description': 'Start time', + 'type': 'string', }, - "end_time": { - "description": "End time", - "type": "string", + 'end_time': { + 'description': 'End time', + 'type': 'string', }, - "place_id": { - "description": "Place identifier", - "type": "number", + 'place_id': { + 'description': 'Place identifier', + 'type': 'number', }, - "price": { - "description": "Price", - "type": "number", + 'price': { + 'description': 'Price', + 'type': 'number', }, - "name_id": { - "description": "Publik user nameID", - "type": "string", + 'name_id': { + 'description': 'Publik user nameID', + 'type': 'string', }, - "first_name": { - "description": "First name", - "type": "string", + 'first_name': { + 'description': 'First name', + 'type': 'string', }, - "last_name": { - "description": "Last name", - "type": "string", + 'last_name': { + 'description': 'Last name', + 'type': 'string', }, - "email": { - "description": "Email", - "type": "string", + 'email': { + 'description': 'Email', + 'type': 'string', }, - "activity_id": { - "description": "Activity identifier", - "type": "number", + 'activity_id': { + 'description': 'Activity identifier', + 'type': 'number', }, - "object": { - "description": "Object", - "type": "string", + 'object': { + 'description': 'Object', + 'type': 'string', }, - "type_id": { - "description": "Rerservation type identifier", - "type": "number", + 'type_id': { + 'description': 'Rerservation type identifier', + 'type': 'number', }, - "vat_rate": { - "description": "VAT rate", - "type": "number", + 'vat_rate': { + 'description': 'VAT rate', + 'type': 'number', }, - "price_code": { - "description": "User price code", - "type": "string", + 'price_code': { + 
'description': 'User price code',
+            'type': 'string',
         },
     },
 }

 GET_RESERVATION_PRICE_SCHEMA = {
-    "$schema": "http://json-schema.org/draft-04/schema#",
-    "title": "Planitech getreservationprice",
-    "description": "",
-    "type": "object",
-    "required": [
-        "date",
-        "start_time",
-        "end_time",
-        "place_id",
-        "name_id",
-        "first_name",
-        "last_name",
-        "email",
-        "activity_id",
-        "type_id",
+    '$schema': 'http://json-schema.org/draft-04/schema#',
+    'title': 'Planitech getreservationprice',
+    'description': '',
+    'type': 'object',
+    'required': [
+        'date',
+        'start_time',
+        'end_time',
+        'place_id',
+        'name_id',
+        'first_name',
+        'last_name',
+        'email',
+        'activity_id',
+        'type_id',
     ],
-    "properties": {
-        "date": {
-            "description": "Date",
-            "type": "string",
+    'properties': {
+        'date': {
+            'description': 'Date',
+            'type': 'string',
         },
-        "start_time": {
-            "description": "Start time",
-            "type": "string",
+        'start_time': {
+            'description': 'Start time',
+            'type': 'string',
         },
-        "end_time": {
-            "description": "End time",
-            "type": "string",
+        'end_time': {
+            'description': 'End time',
+            'type': 'string',
         },
-        "place_id": {
-            "description": "Place identifier",
-            "type": "number",
+        'place_id': {
+            'description': 'Place identifier',
+            'type': 'number',
         },
-        "name_id": {
-            "description": "Publik user nameID",
-            "type": "string",
+        'name_id': {
+            'description': 'Publik user nameID',
+            'type': 'string',
         },
-        "first_name": {
-            "description": "First name",
-            "type": "string",
+        'first_name': {
+            'description': 'First name',
+            'type': 'string',
         },
-        "last_name": {
-            "description": "Last name",
-            "type": "string",
+        'last_name': {
+            'description': 'Last name',
+            'type': 'string',
        },
-        "email": {
-            "description": "Email",
-            "type": "string",
+        'email': {
+            'description': 'Email',
+            'type': 'string',
         },
-        "activity_id": {
-            "description": "Activity identifier",
-            "type": "number",
+        'activity_id': {
+            'description': 'Activity identifier',
+            'type': 'number',
         },
-        "type_id": {
-            "description": "Rerservation type identifier",
-            "type": "number",
+        'type_id': {
+            'description': 'Rerservation type identifier',
+            'type': 'number',
         },
-        "price_code": {
-            "description": "User price code",
-            "type": "string",
+        'price_code': {
+            'description': 'User price code',
+            'type': 'string',
         },
     },
 }

-RESERVATION_STATUS = {"confirmed": 3, "invalid": 0, " pre-reservation": 1, "standard": 2}
+RESERVATION_STATUS = {'confirmed': 3, 'invalid': 0, ' pre-reservation': 1, 'standard': 2}

 UPDATE_RESERVATION_SCHEMA = {
-    "$schema": "http://json-schema.org/draft-04/schema#",
-    "title": "Planitech updatereservation",
-    "description": "",
-    "type": "object",
-    "required": ["reservation_id", "status"],
-    "properties": {
-        "reservation_id": {
-            "description": "Reservation Identifier",
-            "type": "number",
+    '$schema': 'http://json-schema.org/draft-04/schema#',
+    'title': 'Planitech updatereservation',
+    'description': '',
+    'type': 'object',
+    'required': ['reservation_id', 'status'],
+    'properties': {
+        'reservation_id': {
+            'description': 'Reservation Identifier',
+            'type': 'number',
         },
-        "status": {
-            "description": "Status of the reservation",
-            "type": "string",
-            "enum": list(RESERVATION_STATUS.keys()),
+        'status': {
+            'description': 'Status of the reservation',
+            'type': 'string',
+            'enum': list(RESERVATION_STATUS.keys()),
         },
     },
 }
@@ -211,14 +211,14 @@ UPDATE_RESERVATION_SCHEMA = {

 def parse_date(date_str):
     date_obj = dateparse.parse_date(date_str)
     if date_obj is None:
-        raise APIError("Invalid date format: %s" % date_str)
+        raise
APIError('Invalid date format: %s' % date_str) return date_obj def parse_time(time_str): timeobj = dateparse.parse_time(time_str) if timeobj is None: - raise APIError("Invalid time format: %s" % time_str) + raise APIError('Invalid time format: %s' % time_str) return timeobj @@ -232,7 +232,7 @@ def compute_hash(content, hardness, salt): def date_to_datetime(date_str): date_obj = parse_date(date_str) if date_obj is None: - raise APIError("Invalid date string: %s" % date_str) + raise APIError('Invalid date string: %s' % date_str) return datetime.combine(date_obj, time(hour=12)) @@ -307,13 +307,13 @@ class PlanitechConnector(BaseResource): kwargs['data'] = json.dumps(mste.encode(params)) response = session_meth(urlparse.urljoin(self.url, endpoint), **kwargs) if response.status_code != 200: - error_msg = "Planitech error %s" % response.status_code + error_msg = 'Planitech error %s' % response.status_code try: data = mste.decode(response.json()) if hasattr(data, 'get'): error = data.get('errors') if error: - error_msg += " - %s" % error + error_msg += ' - %s' % error except TypeError: pass raise APIError(error_msg) @@ -344,8 +344,8 @@ class PlanitechConnector(BaseResource): self.requests.post, 'getPlacesInfo', { - "placeIdentifiers": [float(key) for key in ref], - "extensionAttributes": extensionAttributes, + 'placeIdentifiers': [float(key) for key in ref], + 'extensionAttributes': extensionAttributes, }, ) for place in data['requestedPlaces']: @@ -374,7 +374,7 @@ class PlanitechConnector(BaseResource): try: min_capacity, max_capacity = int(min_capacity), int(max_capacity) except (ValueError, TypeError): - raise APIError("min_capacity and max_capacity must be integers") + raise APIError('min_capacity and max_capacity must be integers') for place_id, place_data in ref.items(): # Filter on capacity @@ -416,7 +416,7 @@ class PlanitechConnector(BaseResource): response.raise_for_status() # the last response should have set a cookie which will be used for authentication except RequestException as e: - raise APIError("Authentication to Planitec failed: %s" % str(e)) + raise APIError('Authentication to Planitec failed: %s' % str(e)) def update_or_create_user(self, post_data): dyn_price_code = post_data.get('price_code') @@ -431,15 +431,15 @@ class PlanitechConnector(BaseResource): if created: # Create planitec user params = { - "externalUserIdentifier": pairing.external_id, - "name": post_data['last_name'], - "firstName": post_data['first_name'], - "mail": post_data['email'], - "pricingCode": price_code, + 'externalUserIdentifier': pairing.external_id, + 'name': post_data['last_name'], + 'firstName': post_data['first_name'], + 'mail': post_data['email'], + 'pricingCode': price_code, } data = self._call_planitech(self.requests.post, 'createPerson', params) if data.get('creationStatus') != 'OK': - raise APIError("Person creation failed: %s" % data.get('creationStatus')) + raise APIError('Person creation failed: %s' % data.get('creationStatus')) elif dyn_price_code and pairing.price_code != dyn_price_code: # Update planitec user @@ -448,7 +448,7 @@ class PlanitechConnector(BaseResource): params = {'externalUserIdentifier': pairing.external_id, 'pricingCode': dyn_price_code} data = self._call_planitech(self.requests.post, 'updatePerson', params) if data.get('modificationStatus') != 'OK': - raise APIError("Person update failed: %s" % data.get('modificationStatus')) + raise APIError('Person update failed: %s' % data.get('modificationStatus')) return pairing @@ -465,20 +465,20 @@ class 
PlanitechConnector(BaseResource): pairing = self.update_or_create_user(post_data) params = { - "activityID": mste.Uint32(post_data['activity_id']), - "contractorExternalIdentifier": pairing.external_id, - "end": end_datetime, - "isWeekly": False, - "places": [float(post_data['place_id'])], - "start": start_datetime, - "typeID": mste.Uint32(post_data['type_id']), + 'activityID': mste.Uint32(post_data['activity_id']), + 'contractorExternalIdentifier': pairing.external_id, + 'end': end_datetime, + 'isWeekly': False, + 'places': [float(post_data['place_id'])], + 'start': start_datetime, + 'typeID': mste.Uint32(post_data['type_id']), } data = self._call_planitech(self.requests.post, 'getFutureReservationPrice', params) if data.get('calculationStatus') != 'OK': - raise APIError("Get reservation price failed: %s" % data.get('calculationStatus')) + raise APIError('Get reservation price failed: %s' % data.get('calculationStatus')) price = data.get('calculatedPrice', False) if price is False: - raise APIError("Get reservation price failed: no price") + raise APIError('Get reservation price failed: no price') return {'data': {'price': int(price), 'raw_data': data}} @endpoint( @@ -495,17 +495,17 @@ class PlanitechConnector(BaseResource): pairing = self.update_or_create_user(post_data) params = { - "activityID": mste.Uint32(post_data['activity_id']), - "contractorExternalIdentifier": pairing.external_id, - "end": end_datetime, - "isWeekly": False, - "object": post_data['object'], - "places": [float(post_data['place_id'])], - "price": mste.Uint32(post_data['price']), - "requestDate": request_date, - "start": start_datetime, - "typeID": mste.Uint32(post_data['type_id']), - "vatRate": mste.Uint32(post_data['vat_rate']), + 'activityID': mste.Uint32(post_data['activity_id']), + 'contractorExternalIdentifier': pairing.external_id, + 'end': end_datetime, + 'isWeekly': False, + 'object': post_data['object'], + 'places': [float(post_data['place_id'])], + 'price': mste.Uint32(post_data['price']), + 'requestDate': request_date, + 'start': start_datetime, + 'typeID': mste.Uint32(post_data['type_id']), + 'vatRate': mste.Uint32(post_data['vat_rate']), } extensions = get_extensions(post_data) if extensions: @@ -513,10 +513,10 @@ class PlanitechConnector(BaseResource): data = self._call_planitech(self.requests.post, 'createReservation', params) if data.get('creationStatus') != 'OK': - raise APIError("Reservation creation failed: %s" % data.get('creationStatus')) + raise APIError('Reservation creation failed: %s' % data.get('creationStatus')) reservation_id = data.get('reservationIdentifier') if not reservation_id: - raise APIError("Reservation creation failed: no reservation ID") + raise APIError('Reservation creation failed: no reservation ID') return {'data': {'reservation_id': int(reservation_id), 'raw_data': data}} def hourly(self): @@ -533,7 +533,7 @@ class PlanitechConnector(BaseResource): for date_obj in available_dates: date_text = dateformat.format(date_obj, 'l d F Y') short_text = dateformat.format(date_obj, 'd/m/Y') - res.append({"id": date_obj.isoformat(), "text": date_text, "short_text": short_text}) + res.append({'id': date_obj.isoformat(), 'text': date_text, 'short_text': short_text}) return res def _place_display(self, raw_data): @@ -543,7 +543,7 @@ class PlanitechConnector(BaseResource): places_ref = self._raw_get_places_referential() res = [] for place in available_places: - res.append({"id": place, "text": places_ref[place]['label']}) + res.append({'id': place, 'text': places_ref[place]['label']}) 
return res def _full_display(self, raw_data, places_id): @@ -643,9 +643,9 @@ class PlanitechConnector(BaseResource): # Additional parameters check valid_displays = ['date', 'place', 'full'] if display not in valid_displays: - raise APIError("Valid display are: %s" % ", ".join(valid_displays)) + raise APIError('Valid display are: %s' % ', '.join(valid_displays)) if start_date is None and start_days is None: - raise APIError("start_date or start_days is required") + raise APIError('start_date or start_days is required') # Starting date computation if start_date is not None: @@ -674,11 +674,11 @@ class PlanitechConnector(BaseResource): ).keys() params = { - "placeIdentifiers": [float(p_id) for p_id in places_id], - "startingDate": utc_start_datetime, - "endingDate": utc_end_datetime, - "requestedStartingTime": float(0), - "requestedEndingTime": duration, + 'placeIdentifiers': [float(p_id) for p_id in places_id], + 'startingDate': utc_start_datetime, + 'endingDate': utc_end_datetime, + 'requestedStartingTime': float(0), + 'requestedEndingTime': duration, } if weekdays is not None: @@ -770,8 +770,8 @@ class PlanitechConnector(BaseResource): ) def updatereservation(self, request, post_data): params = { - "reservationIdentifier": mste.Uint32(post_data['reservation_id']), - "situation": mste.Uint32(RESERVATION_STATUS[post_data['status']]), + 'reservationIdentifier': mste.Uint32(post_data['reservation_id']), + 'situation': mste.Uint32(RESERVATION_STATUS[post_data['status']]), } extensions = get_extensions(post_data) if extensions: @@ -779,7 +779,7 @@ class PlanitechConnector(BaseResource): data = self._call_planitech(self.requests.post, 'updateReservation', params) if data.get('modificationStatus') != 'OK': - raise APIError("Update reservation failed: %s" % data.get('modificationStatus')) + raise APIError('Update reservation failed: %s' % data.get('modificationStatus')) return {'data': {'raw_data': data}} def check_status(self): diff --git a/passerelle/contrib/planitech/mste.py b/passerelle/contrib/planitech/mste.py index b2f96427..a8d55971 100644 --- a/passerelle/contrib/planitech/mste.py +++ b/passerelle/contrib/planitech/mste.py @@ -111,7 +111,7 @@ class MSTEDecoder: def _parse_item(self): token = self._next_token() _type = DECODE_TOKENS[token] - return getattr(self, "_parse_%s" % _type)() + return getattr(self, '_parse_%s' % _type)() def _parse_localdate(self): timestamp = self._next_token() @@ -245,7 +245,7 @@ class MSTEEncoder: elif isinstance(obj, Uint32): self._encode_uint32(obj) else: - raise TypeError("%s encoding not supported" % type(obj)) + raise TypeError('%s encoding not supported' % type(obj)) def _encode_nil(self): self._push_token_type('nil') @@ -263,10 +263,10 @@ class MSTEEncoder: self._push(obj) def encode(self): - res = ["MSTE0102"] + res = ['MSTE0102'] self._encode_obj(self._data) nb_token = 5 + len(self._keys_store) + len(self._stream) - res = ["MSTE0102", nb_token, 'CRC00000000', 0, len(self._keys_store)] + self._keys_store + res = ['MSTE0102', nb_token, 'CRC00000000', 0, len(self._keys_store)] + self._keys_store res.extend(self._stream) return res diff --git a/passerelle/contrib/rsa13/models.py b/passerelle/contrib/rsa13/models.py index 3bc1b2e4..083017dd 100644 --- a/passerelle/contrib/rsa13/models.py +++ b/passerelle/contrib/rsa13/models.py @@ -1424,69 +1424,69 @@ class RSA13Resource(BaseResource, HTTPResource): display_order=12, json_schema_response=response_schema( { - "type": "object", - "properties": { - "avis_pi": { - "type": "object", - "properties": { - "avis": 
{"type": "string"}, - "date": DATE_SCHEMA, - "montant": {"type": "number"}, + 'type': 'object', + 'properties': { + 'avis_pi': { + 'type': 'object', + 'properties': { + 'avis': {'type': 'string'}, + 'date': DATE_SCHEMA, + 'montant': {'type': 'number'}, }, }, - "budget": { - "type": "object", - "properties": { - "date_reception": {"type": "string"}, - "justificatifs": { - "type": "array", - "items": { - "type": "object", - "properties": { - "conforme": {"type": "string"}, - "date_reception": DATE_SCHEMA, - "date_relance": DATE_SCHEMA, - "num_versement": {"type": "integer"}, - "reception": {"type": "string"}, - "type": {"type": "string"}, + 'budget': { + 'type': 'object', + 'properties': { + 'date_reception': {'type': 'string'}, + 'justificatifs': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'conforme': {'type': 'string'}, + 'date_reception': DATE_SCHEMA, + 'date_relance': DATE_SCHEMA, + 'num_versement': {'type': 'integer'}, + 'reception': {'type': 'string'}, + 'type': {'type': 'string'}, }, }, }, - "nombre_versements": {"type": "integer"}, + 'nombre_versements': {'type': 'integer'}, }, }, - "cloture": { - "type": "object", - "properties": { - "date_cloture": DATE_SCHEMA, - "date_relance": DATE_SCHEMA, + 'cloture': { + 'type': 'object', + 'properties': { + 'date_cloture': DATE_SCHEMA, + 'date_relance': DATE_SCHEMA, }, }, - "code_tfi": {"type": "string"}, - "decision_sai": { - "type": "object", - "properties": { - "date": DATE_SCHEMA, - "decision": {"type": "string"}, - "montant": {"type": "number"}, + 'code_tfi': {'type': 'string'}, + 'decision_sai': { + 'type': 'object', + 'properties': { + 'date': DATE_SCHEMA, + 'decision': {'type': 'string'}, + 'montant': {'type': 'number'}, }, }, - "demande": { - "type": "object", - "properties": { - "date": DATE_SCHEMA, - "montant": {"type": "number"}, + 'demande': { + 'type': 'object', + 'properties': { + 'date': DATE_SCHEMA, + 'montant': {'type': 'number'}, }, }, - "id": {"type": "integer"}, - "lib_tfi": {"type": "string"}, - "recours": { - "type": "object", - "properties": { - "date_decision": DATE_SCHEMA, - "date_demande": DATE_SCHEMA, - "decision": {"type": "string"}, - "montant": {"type": "string"}, + 'id': {'type': 'integer'}, + 'lib_tfi': {'type': 'string'}, + 'recours': { + 'type': 'object', + 'properties': { + 'date_decision': DATE_SCHEMA, + 'date_demande': DATE_SCHEMA, + 'decision': {'type': 'string'}, + 'montant': {'type': 'string'}, }, }, }, @@ -1737,23 +1737,23 @@ class RSA13Resource(BaseResource, HTTPResource): { 'type': 'array', 'items': { - "type": "object", - "properties": { - "convos_par_motif": { - "type": "array", - "items": { - "type": "object", - "properties": { - "nombre": {"type": "integer"}, - "motif": {"type": "string"}, + 'type': 'object', + 'properties': { + 'convos_par_motif': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'nombre': {'type': 'integer'}, + 'motif': {'type': 'string'}, }, }, }, - "derniere_consequence": { - "type": "object", - "properties": { - "date": DATE_SCHEMA, - "consequence": {"type": "string"}, + 'derniere_consequence': { + 'type': 'object', + 'properties': { + 'date': DATE_SCHEMA, + 'consequence': {'type': 'string'}, }, }, }, @@ -1793,22 +1793,22 @@ class RSA13Resource(BaseResource, HTTPResource): { 'type': 'array', 'items': { - "type": "object", - "properties": { - "id": {"type": "string"}, - "code_axe": {"type": "string"}, - "lib_axe": {"type": "string"}, - "code_rome": {"type": "string"}, - "lib_rome": {"type": "string"}, - "code_categorie": 
{"type": "string"}, - "lib_categorie": {"type": "string"}, - "lib_secteur": {"type": "string"}, - "lib_niveau": {"type": "string"}, - "lib_modalite": {"type": "string"}, - "date_inscription": DATE_SCHEMA, - "date_sortie": DATE_SCHEMA, - "motif_sortie": {"type": "string"}, - "date_dernier_ent": DATE_SCHEMA, + 'type': 'object', + 'properties': { + 'id': {'type': 'string'}, + 'code_axe': {'type': 'string'}, + 'lib_axe': {'type': 'string'}, + 'code_rome': {'type': 'string'}, + 'lib_rome': {'type': 'string'}, + 'code_categorie': {'type': 'string'}, + 'lib_categorie': {'type': 'string'}, + 'lib_secteur': {'type': 'string'}, + 'lib_niveau': {'type': 'string'}, + 'lib_modalite': {'type': 'string'}, + 'date_inscription': DATE_SCHEMA, + 'date_sortie': DATE_SCHEMA, + 'motif_sortie': {'type': 'string'}, + 'date_dernier_ent': DATE_SCHEMA, }, }, } @@ -1840,12 +1840,12 @@ class RSA13Resource(BaseResource, HTTPResource): { 'type': 'array', 'items': { - "type": "object", - "properties": { - "id": {"type": "string"}, - "text": {"type": "string"}, - "date_deb": {"type": "string"}, - "date_fin": {"type": "string"}, + 'type': 'object', + 'properties': { + 'id': {'type': 'string'}, + 'text': {'type': 'string'}, + 'date_deb': {'type': 'string'}, + 'date_fin': {'type': 'string'}, }, }, } @@ -1979,31 +1979,31 @@ class RSA13Resource(BaseResource, HTTPResource): DEFAULTS = { 'beneficiaire_csv_columns': [ - "NUM_CAF", - "CODE_PER", - "NOM_PER", - "PRENOM_PER", - "DTNAI_PER", - "ACTIF_PER", - "CODE_PI", - "LIB_CODE_PI", - "TOPPERSDRODEVORSA", - "LIB_ETATDOSRSA", - "LIB_MOTIF_ETATDOSRSA", - "NB_JOUR_DEPUIS_ARR", - "DATE_DEB", - "DATE_1IERE_CONS", - "DATE_DERNIERE_CONSULT", - "DATE_REELLE_RDV", - "NUM_CINS", - "DATE_SIGN", - "DATE_DEB_CI", - "DATE_FIN_CI", - "REFERENT_CI", - "ACTION_EN_COURS", - "DELAI_REGUL", - "PROC_EN_COURS", - "REFERENT_AFFECTATION", + 'NUM_CAF', + 'CODE_PER', + 'NOM_PER', + 'PRENOM_PER', + 'DTNAI_PER', + 'ACTIF_PER', + 'CODE_PI', + 'LIB_CODE_PI', + 'TOPPERSDRODEVORSA', + 'LIB_ETATDOSRSA', + 'LIB_MOTIF_ETATDOSRSA', + 'NB_JOUR_DEPUIS_ARR', + 'DATE_DEB', + 'DATE_1IERE_CONS', + 'DATE_DERNIERE_CONSULT', + 'DATE_REELLE_RDV', + 'NUM_CINS', + 'DATE_SIGN', + 'DATE_DEB_CI', + 'DATE_FIN_CI', + 'REFERENT_CI', + 'ACTION_EN_COURS', + 'DELAI_REGUL', + 'PROC_EN_COURS', + 'REFERENT_AFFECTATION', 'COMPL1_ADR', 'COMPL2_ADR', 'VOIE_ADR', @@ -2013,57 +2013,57 @@ DEFAULTS = { 'INSEE_ADR', ], 'facturation_csv_columns': [ - "PLATEFORME", - "MATRICULE", - "NOM", - "PRENOM", - "DTNAI", - "GENRE", - "ROLE", - "CODE_POSTAL", - "COMMUNE", - "DATE_SIGN", - "DATE_DEB", - "DUREE", - "DATE_FIN", - "COEFFICIENT", + 'PLATEFORME', + 'MATRICULE', + 'NOM', + 'PRENOM', + 'DTNAI', + 'GENRE', + 'ROLE', + 'CODE_POSTAL', + 'COMMUNE', + 'DATE_SIGN', + 'DATE_DEB', + 'DUREE', + 'DATE_FIN', + 'COEFFICIENT', ], 'sorti_csv_columns': [ - "NUM_CAF", - "CODE_PER", - "NOM_PER", - "PRENOM_PER", - "DTNAI_PER", - "CP_PER", - "COMMUNE_PER", - "ACTIF_PER", - "CODE_PI", - "LIB_CODE_PI", - "TOPPERSDRODEVORSA", - "LIB_ETATDOSRSA", - "LIB_MOTIF_ETATDOSRSA", - "PLT_DT_DEB_AFF", - "PLT_DT_FIN_AFF", - "PLT_MOTIF_FIN_ACC", - "PLT_COMMENTAIRE_REF", - "PLT_NUM_CI", - "PLT_PLATEFORME_CI", - "PLT_OPERATEUR_CI", - "PLT_REFERENT_CI", - "PLT_DECISION_CI", - "PLT_DUREE_CI", - "PLT_DATE_DEB_CI", - "PLT_DATE_FIN_CI", - "NOUVEAU_DT_DEB_AFF", - "NOUVEAU_AFF", - "NOUVEAU_COMMENTAIRE_PI", - "NOUVEAU_NUM_CI", - "NOUVEAU_PLATEFORME_CI", - "NOUVEAU_OPERATEUR_CI", - "NOUVEAU_REFERENT_CI", - "NOUVEAU_DECISION_CI", - "NOUVEAU_DUREE_CI", - "NOUVEAU_DATE_DEB_CI", - 
"NOUVEAU_DATE_FIN_CI", + 'NUM_CAF', + 'CODE_PER', + 'NOM_PER', + 'PRENOM_PER', + 'DTNAI_PER', + 'CP_PER', + 'COMMUNE_PER', + 'ACTIF_PER', + 'CODE_PI', + 'LIB_CODE_PI', + 'TOPPERSDRODEVORSA', + 'LIB_ETATDOSRSA', + 'LIB_MOTIF_ETATDOSRSA', + 'PLT_DT_DEB_AFF', + 'PLT_DT_FIN_AFF', + 'PLT_MOTIF_FIN_ACC', + 'PLT_COMMENTAIRE_REF', + 'PLT_NUM_CI', + 'PLT_PLATEFORME_CI', + 'PLT_OPERATEUR_CI', + 'PLT_REFERENT_CI', + 'PLT_DECISION_CI', + 'PLT_DUREE_CI', + 'PLT_DATE_DEB_CI', + 'PLT_DATE_FIN_CI', + 'NOUVEAU_DT_DEB_AFF', + 'NOUVEAU_AFF', + 'NOUVEAU_COMMENTAIRE_PI', + 'NOUVEAU_NUM_CI', + 'NOUVEAU_PLATEFORME_CI', + 'NOUVEAU_OPERATEUR_CI', + 'NOUVEAU_REFERENT_CI', + 'NOUVEAU_DECISION_CI', + 'NOUVEAU_DUREE_CI', + 'NOUVEAU_DATE_DEB_CI', + 'NOUVEAU_DATE_FIN_CI', ], } diff --git a/passerelle/contrib/sigerly/models.py b/passerelle/contrib/sigerly/models.py index cc1dd6d7..8b4f009b 100644 --- a/passerelle/contrib/sigerly/models.py +++ b/passerelle/contrib/sigerly/models.py @@ -25,11 +25,11 @@ from passerelle.utils.jsonresponse import APIError CREATE_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', - "type": "object", + 'type': 'object', 'required': ['demandeur', 'id_typeinterv', 'id_urgence'], 'properties': { 'demandeur': { - 'description': "Nom du demandeur", + 'description': 'Nom du demandeur', 'type': 'string', }, 'id_typeinterv': { @@ -62,7 +62,7 @@ CREATE_SCHEMA = { QUERY_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', - "type": "object", + 'type': 'object', 'properties': { 'id_intervention': { 'description': 'Rechercher une intervention par son numéro' diff --git a/passerelle/contrib/solis_afi_mss/models.py b/passerelle/contrib/solis_afi_mss/models.py index 13ee395b..5adc1fca 100644 --- a/passerelle/contrib/solis_afi_mss/models.py +++ b/passerelle/contrib/solis_afi_mss/models.py @@ -25,7 +25,7 @@ from passerelle.utils.jsonresponse import APIError CONTACT_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', - "type": "object", + 'type': 'object', 'properties': { 'adresseMailPerso': { 'description': 'Private mail address', @@ -48,7 +48,7 @@ CONTACT_SCHEMA = { TAX_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', - "type": "object", + 'type': 'object', 'properties': { 'indexImposition': { 'description': 'Tax index', @@ -76,7 +76,7 @@ TAX_SCHEMA = { DEMAND_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', - "type": "object", + 'type': 'object', 'properties': { 'codeTypeAide': { 'description': 'Allowance type code', @@ -117,21 +117,21 @@ DOCUMENT_SCHEMA = { 'type': 'object', 'properties': { 'codeGedDocument': { - 'description': "Code du document dans le paramétrage GED", + 'description': 'Code du document dans le paramétrage GED', }, 'document': { 'type': 'object', 'properties': { 'filename': { - 'description': "Nom du ficher", + 'description': 'Nom du ficher', 'type': 'string', }, 'content_type': { - 'description': "Type MIME", + 'description': 'Type MIME', 'type': 'string', }, 'content': { - 'description': "Contenu", + 'description': 'Contenu', 'type': 'string', }, }, @@ -285,7 +285,7 @@ class SolisAfiMss(BaseResource, HTTPResource): display_order=5, name='update-contact', methods=['post'], - description=_("Update contact details for an agent"), + description=_('Update contact details for an agent'), parameters={ 'email': {'description': _("Agent's email address")}, }, @@ -404,7 +404,7 @@ class SolisAfiMss(BaseResource, HTTPResource): for person in results['adults'] + results['children']: for index in [x.strip() for x in 
post_data['individusConcernes'].split(':') if x.strip()]: if str(person['indexIndividu']) == index: - related_persons.append({"indexIndividu": index}) + related_persons.append({'indexIndividu': index}) post_data['indexAgent'] = str(results['index']) post_data['individusConcernes'] = related_persons @@ -421,7 +421,7 @@ class SolisAfiMss(BaseResource, HTTPResource): description=_('Submit a document to the GED'), parameters={ 'email': {'description': _("Agent's email address"), 'optional': True}, - 'indexAideFinanciere': {'description': _("Allowance index"), 'optional': True}, + 'indexAideFinanciere': {'description': _('Allowance index'), 'optional': True}, }, post={'request_body': {'schema': {'application/json': DOCUMENT_SCHEMA}}}, ) diff --git a/passerelle/contrib/solis_apa/conciliation.py b/passerelle/contrib/solis_apa/conciliation.py index 6467474d..9e80d0eb 100644 --- a/passerelle/contrib/solis_apa/conciliation.py +++ b/passerelle/contrib/solis_apa/conciliation.py @@ -16,7 +16,7 @@ CONCILIATION_INDIVIDU = { - 'block': {'name': 'Individu', 'pk': "PK/IndexIndividu/@V"}, + 'block': {'name': 'Individu', 'pk': 'PK/IndexIndividu/@V'}, 'criteria': { 5: ( { @@ -56,23 +56,23 @@ CONCILIATION_INDIVIDU = { 'EtatCivil/NomJeuneFille/@V': 'nom', }, 'output': [ - "Dossier/PK/IndexDossier/@V", - "PK/IndexIndividu/@V", - "EtatCivil/Nom/@V", - "EtatCivil/NomJeuneFille/@V", - "EtatCivil/Prenom/@V", - "EtatCivil/DateNaissance/@V", - "Dossier/Adresse/NumeroLieu/@V", - "Dossier/Adresse/NatureLieu/@Lc", - "Dossier/Adresse/NomLieu/@V", - "Dossier/Adresse/ComplementLieu/@V", - "Dossier/Adresse/CpLieu/@V", - "Dossier/Adresse/Commune/NomCom/@V", + 'Dossier/PK/IndexDossier/@V', + 'PK/IndexIndividu/@V', + 'EtatCivil/Nom/@V', + 'EtatCivil/NomJeuneFille/@V', + 'EtatCivil/Prenom/@V', + 'EtatCivil/DateNaissance/@V', + 'Dossier/Adresse/NumeroLieu/@V', + 'Dossier/Adresse/NatureLieu/@Lc', + 'Dossier/Adresse/NomLieu/@V', + 'Dossier/Adresse/ComplementLieu/@V', + 'Dossier/Adresse/CpLieu/@V', + 'Dossier/Adresse/Commune/NomCom/@V', ], } CONCILIATION_INDIVIDU_SANS_DN = { - 'block': {'name': 'Individu', 'pk': "PK/IndexIndividu/@V"}, + 'block': {'name': 'Individu', 'pk': 'PK/IndexIndividu/@V'}, 'criteria': { 5: ( { @@ -103,23 +103,23 @@ CONCILIATION_INDIVIDU_SANS_DN = { 'EtatCivil/NomJeuneFille/@V': 'nom', }, 'output': [ - "Dossier/PK/IndexDossier/@V", - "PK/IndexIndividu/@V", - "EtatCivil/Nom/@V", - "EtatCivil/NomJeuneFille/@V", - "EtatCivil/Prenom/@V", - "EtatCivil/DateNaissance/@V", - "Dossier/Adresse/NumeroLieu/@V", - "Dossier/Adresse/NatureLieu/@Lc", - "Dossier/Adresse/NomLieu/@V", - "Dossier/Adresse/ComplementLieu/@V", - "Dossier/Adresse/CpLieu/@V", - "Dossier/Adresse/Commune/NomCom/@V", + 'Dossier/PK/IndexDossier/@V', + 'PK/IndexIndividu/@V', + 'EtatCivil/Nom/@V', + 'EtatCivil/NomJeuneFille/@V', + 'EtatCivil/Prenom/@V', + 'EtatCivil/DateNaissance/@V', + 'Dossier/Adresse/NumeroLieu/@V', + 'Dossier/Adresse/NatureLieu/@Lc', + 'Dossier/Adresse/NomLieu/@V', + 'Dossier/Adresse/ComplementLieu/@V', + 'Dossier/Adresse/CpLieu/@V', + 'Dossier/Adresse/Commune/NomCom/@V', ], } CONCILIATION_ADRESSE = { - 'block': {'name': 'Adresse', 'pk': "CodeLieu/@V"}, + 'block': {'name': 'Adresse', 'pk': 'CodeLieu/@V'}, 'criteria': { 5: ( { @@ -135,18 +135,18 @@ CONCILIATION_ADRESSE = { 'Commune/PK/CodeCommune/@V': 'commune', }, 'output': [ - "CodeLieu/@V", - "NatureLieu/@Lc", - "NomLieu/@V", - "CodePostal/@V", - "Commune/PK/CodeCommune/@V", - "Commune/NomCom/@V", - "CodeDepartement/@V", + 'CodeLieu/@V', + 'NatureLieu/@Lc', + 'NomLieu/@V', + 
'CodePostal/@V', + 'Commune/PK/CodeCommune/@V', + 'Commune/NomCom/@V', + 'CodeDepartement/@V', ], } CONCILIATION_PARTICULIER = { - 'block': {'name': 'Particulier', 'pk': "PK/IndexParticulier/@V"}, + 'block': {'name': 'Particulier', 'pk': 'PK/IndexParticulier/@V'}, 'criteria': { 5: ( { @@ -177,23 +177,23 @@ CONCILIATION_PARTICULIER = { 'EtatCivil/NomJeuneFille/@V': 'nom', }, 'output': [ - "PK/IndexParticulier/@V", - "EtatCivil/Nom/@V", - "EtatCivil/NomJeuneFille/@V", - "EtatCivil/Prenom/@V", - "EtatCivil/DateNaissance/@V", - "Adresse/NumeroLieu/@V", - "Adresse/NatureLieu/@Lc", - "Adresse/NomLieu/@V", - "Adresse/ComplementLieu/@V", - "Adresse/CpLieu/@V", - "Adresse/Commune/NomCom/@V", + 'PK/IndexParticulier/@V', + 'EtatCivil/Nom/@V', + 'EtatCivil/NomJeuneFille/@V', + 'EtatCivil/Prenom/@V', + 'EtatCivil/DateNaissance/@V', + 'Adresse/NumeroLieu/@V', + 'Adresse/NatureLieu/@Lc', + 'Adresse/NomLieu/@V', + 'Adresse/ComplementLieu/@V', + 'Adresse/CpLieu/@V', + 'Adresse/Commune/NomCom/@V', ], } def conciliation_payload(config, **data): - block = {"name": config['block']['name'], "PrimaryKey": {"key": [config['block']['pk']]}} + block = {'name': config['block']['name'], 'PrimaryKey': {'key': [config['block']['pk']]}} setting = [] for affinity, afflist in config['criteria'].items(): for aff in afflist: @@ -202,8 +202,8 @@ def conciliation_payload(config, **data): criterium.append({'key': xpath, 'operator': op}) setting.append( { - "affinity": affinity, - "Criterium": criterium, + 'affinity': affinity, + 'Criterium': criterium, } ) criterium = [] @@ -214,16 +214,16 @@ def conciliation_payload(config, **data): returndata.append(xpath) return { - "ConciliationInputWS": { - "Block": block, - "Input": { - "Settings": { - "Setting": setting, + 'ConciliationInputWS': { + 'Block': block, + 'Input': { + 'Settings': { + 'Setting': setting, }, - "Criteria": {"Criterium": criterium}, + 'Criteria': {'Criterium': criterium}, }, - "Output": { - "ReturnDatas": {"returnData": returndata}, + 'Output': { + 'ReturnDatas': {'returnData': returndata}, }, } } diff --git a/passerelle/contrib/solis_apa/integration.py b/passerelle/contrib/solis_apa/integration.py index e52a23ad..ce4144fe 100644 --- a/passerelle/contrib/solis_apa/integration.py +++ b/passerelle/contrib/solis_apa/integration.py @@ -473,13 +473,13 @@ def get_info_bancaire(fields, wf): banque_cle = wf.get('supp8_var_clerib') or fields.get('clerib') return { - "titulaireCompte": banque_titulaire, - "domiciliation": banque_domiciliation, - "codeBanque": banque_codebanque, - "codeGuichet": banque_codeguichet, - "numeroCompte": banque_numero, - "cleRib": banque_cle, - "modeReglement": 1, + 'titulaireCompte': banque_titulaire, + 'domiciliation': banque_domiciliation, + 'codeBanque': banque_codebanque, + 'codeGuichet': banque_codeguichet, + 'numeroCompte': banque_numero, + 'cleRib': banque_cle, + 'modeReglement': 1, } @@ -524,36 +524,36 @@ def get_patrimoine(fields, wf): if immobilier_bati > 0 and revenus_annee: message.append( { - "Nomenclature": {"indexFamille": 2, "indexNature": 1}, - "anneeReference": int(revenus_annee), - "valeurPrelevement": immobilier_bati, + 'Nomenclature': {'indexFamille': 2, 'indexNature': 1}, + 'anneeReference': int(revenus_annee), + 'valeurPrelevement': immobilier_bati, } ) if immobilier_non_bati > 0 and revenus_annee: message.append( { - "Nomenclature": {"indexFamille": 2, "indexNature": 2}, - "anneeReference": int(revenus_annee), - "valeurPrelevement": immobilier_non_bati, + 'Nomenclature': {'indexFamille': 2, 'indexNature': 2}, + 
'anneeReference': int(revenus_annee), + 'valeurPrelevement': immobilier_non_bati, } ) if prelevements > 0 and revenus_annee: message.append( { - "Nomenclature": {"indexFamille": 3, "indexNature": 1}, - "anneeReference": int(revenus_annee), - "valeurPrelevement": prelevements, + 'Nomenclature': {'indexFamille': 3, 'indexNature': 1}, + 'anneeReference': int(revenus_annee), + 'valeurPrelevement': prelevements, } ) if fonciers > 0 and fonciers_annee: message.append( { - "Nomenclature": {"indexFamille": 4, "indexNature": 1}, - "anneeReference": int(fonciers_annee), - "valeurPrelevement": fonciers, + 'Nomenclature': {'indexFamille': 4, 'indexNature': 1}, + 'anneeReference': int(fonciers_annee), + 'valeurPrelevement': fonciers, } ) diff --git a/passerelle/contrib/solis_apa/models.py b/passerelle/contrib/solis_apa/models.py index 876fd2cd..9ebe321c 100644 --- a/passerelle/contrib/solis_apa/models.py +++ b/passerelle/contrib/solis_apa/models.py @@ -87,11 +87,11 @@ class SolisAPA(BaseResource): data['ReferentialOptions']['Filters'] = {'Filter': solis_filters} if attributes: data['ReferentialOptions']['Attributes'] = { - "referential": [ + 'referential': [ { - "schema": "stdr", - "table": referential, - "field": attributes, + 'schema': 'stdr', + 'table': referential, + 'field': attributes, } ] } diff --git a/passerelle/contrib/solis_apa/suivi.py b/passerelle/contrib/solis_apa/suivi.py index 9538d60c..3ae30365 100644 --- a/passerelle/contrib/solis_apa/suivi.py +++ b/passerelle/contrib/solis_apa/suivi.py @@ -18,16 +18,16 @@ import datetime import json PAYLOAD = { - "visite": {}, - "plan-aide": {"DemandeAsg": {"DateDebut": "%(datedebut)s", "DateFin": "%(datefin)s"}}, - "presentation-commission": {"OrdreJourAsg": {"DateDebut": "%(datedebut)s", "DateFin": "%(datefin)s"}}, - "decision-commission": { - "DemandeAsg": { - "EtatDecision": "R", - "DateDebutNotification": "%(datedebut)s", - "DateFinNotification": "%(datefin)s", - "DateDebutDecision": "%(datedebut)s", - "DateFinDecision": "%(datefin)s", + 'visite': {}, + 'plan-aide': {'DemandeAsg': {'DateDebut': '%(datedebut)s', 'DateFin': '%(datefin)s'}}, + 'presentation-commission': {'OrdreJourAsg': {'DateDebut': '%(datedebut)s', 'DateFin': '%(datefin)s'}}, + 'decision-commission': { + 'DemandeAsg': { + 'EtatDecision': 'R', + 'DateDebutNotification': '%(datedebut)s', + 'DateFinNotification': '%(datefin)s', + 'DateDebutDecision': '%(datedebut)s', + 'DateFinDecision': '%(datefin)s', } }, } diff --git a/passerelle/contrib/toulouse_axel/models.py b/passerelle/contrib/toulouse_axel/models.py index b6550d6e..a2734c69 100644 --- a/passerelle/contrib/toulouse_axel/models.py +++ b/passerelle/contrib/toulouse_axel/models.py @@ -131,7 +131,7 @@ class ToulouseAxel(BaseResource): cache.set(cache_key, management_dates, 3600) # 1 hour return management_dates - @endpoint(display_order=4, description=_("Get dates of the update management")) + @endpoint(display_order=4, description=_('Get dates of the update management')) def management_dates(self, request): return {'data': self.get_management_dates()} @@ -219,7 +219,7 @@ class ToulouseAxel(BaseResource): @endpoint( display_category='DUI', display_order=3, - description=_("Check DUI status"), + description=_('Check DUI status'), parameters={ 'NameID': {'description': _('Publik ID')}, }, @@ -262,7 +262,7 @@ class ToulouseAxel(BaseResource): @endpoint( display_order=5, - description=_("Get a referential"), + description=_('Get a referential'), pattern=r'^(?P[\w-]+)/?$', example_pattern='{code}', parameters={ @@ -353,7 +353,7 @@ 
class ToulouseAxel(BaseResource):
     @endpoint(
         display_category='DUI',
         display_order=5,
-        description=_("Get information about children"),
+        description=_('Get information about children'),
         parameters={
             'NameID': {'description': _('Publik ID')},
         },
@@ -366,7 +366,7 @@ class ToulouseAxel(BaseResource):
     @endpoint(
         display_category='DUI',
         display_order=6,
-        description=_("Get information about a child"),
+        description=_('Get information about a child'),
         parameters={
             'NameID': {'description': _('Publik ID')},
             'idpersonne': {'description': _('Child ID')},
@@ -404,7 +404,7 @@ class ToulouseAxel(BaseResource):
     @endpoint(
         display_category='DUI',
         display_order=7,
-        description=_("Get information about children contacts"),
+        description=_('Get information about children contacts'),
         parameters={
             'NameID': {'description': _('Publik ID')},
         },
@@ -731,7 +731,7 @@ class ToulouseAxel(BaseResource):
         name='regie',
         pattern=r'^(?P<regie_id>[\w-]+)/invoices/?$',
         example_pattern='{regie_id}/invoices',
-        description=_("Get invoices to pay"),
+        description=_('Get invoices to pay'),
         parameters={
             'NameID': {'description': _('Publik ID'), 'blank': False},
             'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'},
@@ -747,7 +747,7 @@ class ToulouseAxel(BaseResource):
         name='regie',
         pattern=r'^(?P<regie_id>[\w-]+)/invoices/history/?$',
         example_pattern='{regie_id}/invoices/history',
-        description=_("Get invoices already paid"),
+        description=_('Get invoices already paid'),
         parameters={
             'NameID': {'description': _('Publik ID'), 'blank': False},
             'regie_id': {'description': _('Regie identifier'), 'example_value': '42-PERISCOL'},
@@ -1046,7 +1046,7 @@ class ToulouseAxel(BaseResource):
     @endpoint(
         display_category='CAN-CLA',
         display_order=1,
-        description=_("Get the list of reference years available for bookings"),
+        description=_('Get the list of reference years available for bookings'),
         parameters={
             'NameID': {'description': _('Publik ID')},
             'pivot_date': {
@@ -1106,7 +1106,7 @@ class ToulouseAxel(BaseResource):
     @endpoint(
         display_category='CAN-CLA',
         display_order=2,
-        description=_("Get information about CLAE activities of all children for the year"),
+        description=_('Get information about CLAE activities of all children for the year'),
         parameters={
             'NameID': {'description': _('Publik ID')},
             'booking_date': {'description': _('Booking date (to get reference year)')},
@@ -1129,7 +1129,7 @@ class ToulouseAxel(BaseResource):
     @endpoint(
         display_category='CAN-CLA',
         display_order=3,
-        description=_("Get the list of CLAE booked activities of a child, for a period"),
+        description=_('Get the list of CLAE booked activities of a child, for a period'),
         parameters={
             'NameID': {'description': _('Publik ID')},
             'idpersonne': {'description': _('Child ID')},
@@ -1206,7 +1206,7 @@ class ToulouseAxel(BaseResource):
     @endpoint(
         display_category='CAN-CLA',
         display_order=4,
-        description=_("Get possible days to book an activity of a child, for a period"),
+        description=_('Get possible days to book an activity of a child, for a period'),
         parameters={
             'NameID': {'description': _('Publik ID')},
             'idpersonne': {'description': _('Child ID')},
@@ -1292,7 +1292,7 @@ class ToulouseAxel(BaseResource):
     @endpoint(
         display_category='CAN-CLA',
         display_order=5,
-        description=_("Get annual possible days to book an activity of a child"),
+        description=_('Get annual possible days to book an activity of a child'),
         parameters={
             'NameID': {'description': _('Publik ID')},
             'idpersonne': {'description': _('Child ID')},
@@ -1347,7 +1347,7 @@ class
ToulouseAxel(BaseResource): @endpoint( display_category='CAN-CLA', display_order=6, - description=_("Get booked days for an activity of a child, for a period"), + description=_('Get booked days for an activity of a child, for a period'), parameters={ 'NameID': {'description': _('Publik ID')}, 'idpersonne': {'description': _('Child ID')}, @@ -1365,7 +1365,7 @@ class ToulouseAxel(BaseResource): @endpoint( display_category='CAN-CLA', display_order=7, - description=_("CLAE/Cantine booking"), + description=_('CLAE/Cantine booking'), parameters={ 'NameID': {'description': _('Publik ID')}, }, @@ -1516,7 +1516,7 @@ class ToulouseAxel(BaseResource): @endpoint( display_category='CAN-CLA', display_order=8, - description=_("CLAE/Cantine annual booking"), + description=_('CLAE/Cantine annual booking'), parameters={ 'NameID': {'description': _('Publik ID')}, }, diff --git a/passerelle/contrib/toulouse_foederis/models.py b/passerelle/contrib/toulouse_foederis/models.py index 43d5df1a..b9ad4246 100644 --- a/passerelle/contrib/toulouse_foederis/models.py +++ b/passerelle/contrib/toulouse_foederis/models.py @@ -58,15 +58,15 @@ ATTACHMENT_SCHEMA = { 'required': ['filename', 'content_type', 'content'], 'properties': { 'filename': { - 'description': _("File name"), + 'description': _('File name'), 'type': 'string', }, 'content_type': { - 'description': _("MIME type"), + 'description': _('MIME type'), 'type': 'string', }, 'content': { - 'description': _("Content"), + 'description': _('Content'), 'type': 'string', }, }, @@ -567,10 +567,10 @@ class Resource(BaseResource, HTTPResource): return HttpResponse(fd, content_type='application/pdf') @endpoint( - name="create-application", + name='create-application', post={ - "description": _("Creates an application"), - "request_body": {"schema": {"application/json": APPLICATION_SCHEMA}}, + 'description': _('Creates an application'), + 'request_body': {'schema': {'application/json': APPLICATION_SCHEMA}}, }, ) def create_application(self, request, post_data): @@ -642,22 +642,22 @@ class Resource(BaseResource, HTTPResource): @endpoint( name='attach-file', post={ - "description": _("Attach a file to an application."), - "request_body": {"schema": {"application/json": ATTACHMENT_SCHEMA}}, + 'description': _('Attach a file to an application.'), + 'request_body': {'schema': {'application/json': ATTACHMENT_SCHEMA}}, }, ) def attach_file(self, request, post_data): - application_id = post_data["application_id"] - attachment_name = post_data["name"] - file = post_data["file"] + application_id = post_data['application_id'] + attachment_name = post_data['name'] + file = post_data['file'] self.http_request( 'POST', f'data/candidature/{application_id}/fields/{attachment_name}?viewIntegrationName=api_publik', json={ - "contentType": file["content_type"], - "value": file["content"], - "fileName": file["filename"], + 'contentType': file['content_type'], + 'value': file['content'], + 'fileName': file['filename'], }, ) diff --git a/passerelle/contrib/toulouse_maelis/activity_schemas.py b/passerelle/contrib/toulouse_maelis/activity_schemas.py index cbd822c8..0f89a1f8 100644 --- a/passerelle/contrib/toulouse_maelis/activity_schemas.py +++ b/passerelle/contrib/toulouse_maelis/activity_schemas.py @@ -183,7 +183,7 @@ SUBSCRIPTION_SCHEMA = { }, }, 'form_api_url': { - 'description': "Adresse de la vue API du formulaire Publik : {{ form_api_url }}", + 'description': 'Adresse de la vue API du formulaire Publik : {{ form_api_url }}', 'oneOf': [ {'type': 'string'}, {'type': 'null'}, diff --git 
a/passerelle/contrib/toulouse_maelis/family_schemas.py b/passerelle/contrib/toulouse_maelis/family_schemas.py index 19d3107c..85035937 100644 --- a/passerelle/contrib/toulouse_maelis/family_schemas.py +++ b/passerelle/contrib/toulouse_maelis/family_schemas.py @@ -115,7 +115,7 @@ AUTHORIZED_PERSON_SCHEMA = { BIRTH_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'title': 'Birth info', - 'description': "Informations relatives à la naissance", + 'description': 'Informations relatives à la naissance', 'type': 'object', 'required': ['dateBirth'], 'properties': { @@ -156,7 +156,7 @@ INDICATOR_SCHEMA = { 'pattern': '.+', }, 'note': { - 'description': "Commentaire pour les indicateurs de type NOTE", + 'description': 'Commentaire pour les indicateurs de type NOTE', 'oneOf': [{'type': 'string'}, {'type': 'null'}], }, 'isActive': { @@ -176,7 +176,7 @@ ID_PROPERTIES = { 'type': 'string', }, 'maidenName': { - 'description': "Nom de jeune fille ", + 'description': 'Nom de jeune fille ', 'oneOf': [{'type': 'string'}, {'type': 'null'}], }, 'birth': BIRTH_SCHEMA, @@ -305,7 +305,7 @@ PROFESSION_SCHEMA = { 'oneOf': [{'type': 'string'}, {'type': 'null'}], }, 'weeklyHours': { - 'description': "horaires de travail hebdomadaire", + 'description': 'horaires de travail hebdomadaire', 'oneOf': [{'type': 'string'}, {'type': 'null'}], }, 'addressPro': ADDRESSPROF_SCHEMA, @@ -332,7 +332,7 @@ CAFINFO_SCHEMA = { RLINFO_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'title': 'RL', - 'description': "Informations sur le responsable légal", + 'description': 'Informations sur le responsable légal', 'type': 'object', 'required': ['firstname', 'lastname', 'civility', 'quality', 'birth', 'adresse'], 'properties': { @@ -389,7 +389,7 @@ DOCTORADDRESS_SCHEMA = { FAMILYDOCTOR_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'title': 'Family doctor', - 'description': "Informations sur le docteur", + 'description': 'Informations sur le docteur', 'type': 'object', 'properties': { 'name': { @@ -407,7 +407,7 @@ FAMILYDOCTOR_SCHEMA = { VACCIN_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'title': 'Vaccin', - 'description': "Informations sur le vaccin", + 'description': 'Informations sur le vaccin', 'type': 'object', 'required': ['code', 'vaccinationDate'], 'properties': { @@ -426,7 +426,7 @@ VACCIN_SCHEMA = { MEDICALRECORD_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'title': 'Medical record', - 'description': "Informations médicales", + 'description': 'Informations médicales', 'oneOf': [ { 'type': 'object', @@ -484,7 +484,7 @@ MEDICALRECORD_SCHEMA = { PAIINFO_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'title': 'PAI', - 'description': "Informations médicales", + 'description': 'Informations médicales', 'type': 'object', 'required': ['code'], 'properties': { @@ -724,7 +724,7 @@ UPDATE_INDICATOR_SCHEMA = { UPDATE_QUOTIENT_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'title': 'Family persons', - 'description': "Mise à jours des quotients sur les responsables légaux", + 'description': 'Mise à jours des quotients sur les responsables légaux', 'type': 'object', 'required': ['yearRev', 'dateStart', 'dateEnd', 'mtt', 'cdquo'], 'properties': { @@ -765,15 +765,15 @@ WCS_FILE_SCHEMA = { 'required': ['filename', 'content_type', 'content'], 'properties': { 'filename': { - 'description': "Nom du ficher", + 'description': 'Nom du ficher', 'type': 'string', }, 'content_type': { - 'description': "Type MIME", + 'description': 
'Type MIME', 'type': 'string', }, 'content': { - 'description': "Contenu", + 'description': 'Contenu', 'type': 'string', }, }, @@ -840,7 +840,7 @@ SCHOOL_PRE_REGISTRATION_SCHEMA = { 'type': 'string', }, 'schoolYear': { - 'description': "Année scolaire", + 'description': 'Année scolaire', 'type': 'string', }, 'dateSubscribe': { @@ -848,7 +848,7 @@ SCHOOL_PRE_REGISTRATION_SCHEMA = { 'type': 'string', }, 'levelCode': { - 'description': "Le code du niveau scolaire", + 'description': 'Le code du niveau scolaire', 'type': 'string', }, }, @@ -876,7 +876,7 @@ SCHOOL_PRE_REGISTRATION_WITH_EXEMPTION_SCHEMA = { 'type': 'string', }, 'schoolYear': { - 'description': "Année scolaire", + 'description': 'Année scolaire', 'type': 'string', }, 'datePresubscribe': { @@ -884,7 +884,7 @@ SCHOOL_PRE_REGISTRATION_WITH_EXEMPTION_SCHEMA = { 'type': 'string', }, 'levelCode': { - 'description': "Le code du niveau scolaire", + 'description': 'Le code du niveau scolaire', 'type': 'string', }, 'idRequestSchool1': { @@ -900,11 +900,11 @@ SCHOOL_PRE_REGISTRATION_WITH_EXEMPTION_SCHEMA = { 'type': 'string', }, 'derogReasonCode': { - 'description': "Code du motif de dérogation", + 'description': 'Code du motif de dérogation', 'type': 'string', }, 'derogComment': { - 'description': "Commentaire relatif à la dérogation", + 'description': 'Commentaire relatif à la dérogation', 'type': 'string', }, }, @@ -932,7 +932,7 @@ SCHOOL_PRE_REGISTRATION_WITH_SIBLING_SCHEMA = { 'type': 'string', }, 'schoolYear': { - 'description': "Année scolaire", + 'description': 'Année scolaire', 'type': 'string', }, 'datePresubscribe': { @@ -940,7 +940,7 @@ SCHOOL_PRE_REGISTRATION_WITH_SIBLING_SCHEMA = { 'type': 'string', }, 'levelCode': { - 'description': "Le code du niveau scolaire", + 'description': 'Le code du niveau scolaire', 'type': 'string', }, 'idSchoolRequested': { @@ -948,7 +948,7 @@ SCHOOL_PRE_REGISTRATION_WITH_SIBLING_SCHEMA = { 'type': 'string', }, 'numPersonSibling': { - 'description': "Identifiant du membre de la fratrie pour lequel le rapprochement est demandé", + 'description': 'Identifiant du membre de la fratrie pour lequel le rapprochement est demandé', 'type': 'string', }, }, diff --git a/passerelle/contrib/toulouse_maelis/invoice_schemas.py b/passerelle/contrib/toulouse_maelis/invoice_schemas.py index fb5ed498..188d4508 100644 --- a/passerelle/contrib/toulouse_maelis/invoice_schemas.py +++ b/passerelle/contrib/toulouse_maelis/invoice_schemas.py @@ -17,7 +17,7 @@ PERSON_BANK_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'title': 'Person bank', - 'description': "Coordonnées du compte à débiter (coordonnées bancaires)", + 'description': 'Coordonnées du compte à débiter (coordonnées bancaires)', 'type': 'object', 'required': ['bankBIC', 'bankIBAN', 'bankRUM', 'dateStart', 'bankAddress', 'lastName', 'firstName'], 'properties': { diff --git a/passerelle/contrib/toulouse_maelis/models.py b/passerelle/contrib/toulouse_maelis/models.py index 9aee196f..e1c9504e 100644 --- a/passerelle/contrib/toulouse_maelis/models.py +++ b/passerelle/contrib/toulouse_maelis/models.py @@ -89,7 +89,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): ) max_payment_delay = models.PositiveIntegerField( default='20', - verbose_name="Délai maximum pour payer une facture via Lingo (en minutes)", + verbose_name='Délai maximum pour payer une facture via Lingo (en minutes)', ) category = 'Connecteurs métiers' @@ -538,13 +538,13 @@ class ToulouseMaelis(BaseResource, HTTPResource): def assert_key_in_referential(self, referential_name, 
key_value, keys_text, required=True): if not key_value: if required: - raise APIError("%s is required and could not be None" % keys_text) + raise APIError('%s is required and could not be None' % keys_text) return try: self.referential.get(referential_name=referential_name, item_id=key_value) except Referential.DoesNotExist: - ref_text = "required " if required else "" - ref_text = ref_text + "referential" + ref_text = 'required ' if required else '' + ref_text = ref_text + 'referential' raise APIError( "%s key value '%s' do not belong to '%s' %s" % (keys_text, key_value, referential_name, ref_text) @@ -993,7 +993,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Lister les situations professionnelles", + description='Lister les situations professionnelles', name='read-professional-situation-list', parameters={ 'id': {'description': 'Identifiant de l’enregistrement'}, @@ -1141,7 +1141,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Rechercher un dossier famille", + description='Rechercher un dossier famille', name='search-family', parameters={ 'q': {'description': 'Recherche en texte intégral'}, @@ -1164,7 +1164,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Rechercher un dossier famille par son numéro de DUI", + description='Rechercher un dossier famille par son numéro de DUI', name='search-family-dui', parameters={ 'q': {'description': 'Numéro de DUI'}, @@ -1199,7 +1199,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Lister les responsables légaux", + description='Lister les responsables légaux', name='read-rl-list', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -1245,7 +1245,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Lister les enfants", + description='Lister les enfants', name='read-child-list', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -1262,7 +1262,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Lister les enfants et les responsables légaux", + description='Lister les enfants et les responsables légaux', name='read-rl-and-child-list', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -1317,7 +1317,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Obtenir les informations sur un responsable légal", + description='Obtenir les informations sur un responsable légal', name='read-rl', parameters={ 'rl_id': {'description': 'Numéro du responsable légal'}, @@ -1357,7 +1357,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Obtenir les informations sur un enfant", + description='Obtenir les informations sur un enfant', name='read-child', parameters={ 'child_id': {'description': "Numéro de l'enfant"}, @@ -1410,17 +1410,17 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Lister les activités auxquelles un RL ou un enfant est inscrit", + description='Lister les activités auxquelles un RL ou un enfant est inscrit', name='read-subscribe-activity-list', parameters={ 'person_id': {'description': "Numéro du responsable légal ou de l'enfant"}, 'NameID': {'description': 'Publik NameID'}, 'family_id': {'description': 'Numéro de 
DUI'}, 'nature': { - 'description': "Natures des activités : PERICSO, EXTRASCO ou LOISIR (toutes par défaut)", + 'description': 'Natures des activités : PERICSO, EXTRASCO ou LOISIR (toutes par défaut)', }, 'type_ids': { - 'description': "Codes des types des activités (tous par défaut), séparés par des virgules", + 'description': 'Codes des types des activités (tous par défaut), séparés par des virgules', 'example_value': 'ACCSOIR,RESTSCOL', }, 'school_year': { @@ -1636,7 +1636,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Ajouter un enfant", + description='Ajouter un enfant', name='create-child', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -1667,7 +1667,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Modifier un enfant", + description='Modifier un enfant', name='update-child', parameters={ 'child_id': {'description': "Numéro de l'enfant"}, @@ -1744,7 +1744,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): description="Créer ou mettre à jour le quotient d'un responsable légal", name='update-quotient', parameters={ - 'rl_id': {'description': "Numéro du responsable légal"}, + 'rl_id': {'description': 'Numéro du responsable légal'}, 'NameID': {'description': 'Publik NameID'}, 'family_id': {'description': 'Numéro de DUI'}, }, @@ -2024,7 +2024,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Ajouter une vaccination à un enfant", + description='Ajouter une vaccination à un enfant', name='update-child-add-vaccination', parameters={ 'child_id': {'description': "Numéro de l'enfant"}, @@ -2093,7 +2093,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Famille', - description="Savoir si un document déjà ajouté est encore valable", + description='Savoir si un document déjà ajouté est encore valable', name='read-supplied-document-validity', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -2101,7 +2101,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): 'numPerson': {'description': "Numéro du responsable légal ou de l'enfant"}, 'code': {'description': 'Code de la pièce'}, 'ref_date': { - 'description': "Date de référence, utilisée pour déduire la validité", + 'description': 'Date de référence, utilisée pour déduire la validité', 'type': 'date', }, }, @@ -2475,7 +2475,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Facture', - description="Ajouter une autorisation de prélèvement", + description='Ajouter une autorisation de prélèvement', name='add-rl1-direct-debit-order', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -2522,7 +2522,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les années scolaires", + description='Lister les années scolaires', name='read-school-years-list', ) def read_school_years_list(self, request): @@ -2530,7 +2530,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les niveaux scolaires", + description='Lister les niveaux scolaires', name='read-school-levels-list', parameters={ 'age': {'description': 'Âge de l\'enfant', 'example_value': '6'}, @@ -2544,7 +2544,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les motifs de dérogation", + description='Lister les motifs de 
dérogation', name='read-exemption-reasons-list', ) def read_exemption_reasons_list(self, request): @@ -2552,7 +2552,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les écoles pour une adresse et niveau scolaire", + description='Lister les écoles pour une adresse et niveau scolaire', name='read-schools-for-address-and-level', parameters={ 'year': {'description': 'Année', 'example_value': '2022'}, @@ -2580,7 +2580,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les écoles pour un enfant et niveau scolaire", + description='Lister les écoles pour un enfant et niveau scolaire', name='read-schools-for-child-and-level', parameters={ 'year': {'description': 'Année', 'example_value': '2023'}, @@ -2634,7 +2634,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Créer une pré-inscription scolaire pour un enfant", + description='Créer une pré-inscription scolaire pour un enfant', name='create-child-school-pre-registration', post={ 'request_body': {'schema': {'application/json': family_schemas.SCHOOL_PRE_REGISTRATION_SCHEMA}} @@ -2646,7 +2646,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Créer une pré-inscription scolaire avec demande de dérogation", + description='Créer une pré-inscription scolaire avec demande de dérogation', name='create-child-school-pre-registration-with-exemption', post={ 'request_body': { @@ -2660,7 +2660,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Créer une pré-inscription scolaire avec rapprochement de fratrie", + description='Créer une pré-inscription scolaire avec rapprochement de fratrie', name='create-child-school-pre-registration-with-sibling', post={ 'request_body': { @@ -2674,7 +2674,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Obtenir le catalogue des activités loisir, avec leurs critères de recherche", + description='Obtenir le catalogue des activités loisir, avec leurs critères de recherche', name='read-activity-list', parameters={ 'ref_date': { @@ -2802,16 +2802,16 @@ class ToulouseMaelis(BaseResource, HTTPResource): parameters={ 'person_id': {'description': "Numéro du responsable légal ou de l'enfant"}, 'nature': { - 'description': "Nature des activités : EXTRASCO ou LOISIR (toutes par défaut)", + 'description': 'Nature des activités : EXTRASCO ou LOISIR (toutes par défaut)', }, 'type_ids': { - 'description': "Codes des types des activités, séparées par des virgules", + 'description': 'Codes des types des activités, séparées par des virgules', 'example_value': 'EXTMERC,EXTVAC', }, 'start_date': {'description': 'Début de la période'}, 'end_date': {'description': 'Fin de la période'}, 'text_template': { - 'description': "Gabarit utilisé pour la valeur text (URL encoding)", + 'description': 'Gabarit utilisé pour la valeur text (URL encoding)', 'example_value': '{{ activity.libelle2 }}', }, }, @@ -2975,24 +2975,24 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Obtenir le catalogue geojson des activités pour une personne", + description='Obtenir le catalogue geojson des activités pour une personne', name='get-person-catalog-geojson', parameters={ 'NameID': {'description': 'Publik NameID'}, 
'family_id': {'description': 'Numéro de DUI'}, 'person_id': {'description': "Numéro du responsable légal ou de l'enfant"}, 'nature': { - 'description': "Nature des activités : EXTRASCO ou LOISIR (toutes par défaut)", + 'description': 'Nature des activités : EXTRASCO ou LOISIR (toutes par défaut)', }, 'type_ids': { - 'description': "Codes des types des activités, séparées par des virgules", + 'description': 'Codes des types des activités, séparées par des virgules', 'example_value': 'EXTMERC,EXTVAC', }, 'start_date': {'description': 'Début de la période'}, 'end_date': {'description': 'Fin de la période'}, 'activity_id': {'description': "Numéro de l'activité"}, 'unit_id': {'description': "Numéro de l'unité"}, - 'place_id': {'description': "Numéro du lieu"}, + 'place_id': {'description': 'Numéro du lieu'}, }, ) def get_person_catalog_geojson( @@ -3096,11 +3096,11 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les natures des activités", + description='Lister les natures des activités', name='read-activity-nature-list', parameters={ 'nature_ids': { - 'description': "Codes des natures des activités (tous par défaut), séparées par des virgules", + 'description': 'Codes des natures des activités (tous par défaut), séparées par des virgules', 'example_value': 'P,1,2', }, }, @@ -3127,7 +3127,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les directions de la ville", + description='Lister les directions de la ville', name='read-direction-list', ) def read_direction_list(self, request): @@ -3135,9 +3135,9 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les services de la ville", + description='Lister les services de la ville', name='read-service-list', - parameters={'direction_id': {'description': "Numéro de la direction sur laquelle filtrer"}}, + parameters={'direction_id': {'description': 'Numéro de la direction sur laquelle filtrer'}}, ) def read_service_list(self, request, direction_id=None): queryset = self.referential.filter(referential_name='Service') @@ -3147,7 +3147,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les indicateurs pour les activités petite enfance", + description='Lister les indicateurs pour les activités petite enfance', name='read-ape-indicators-list', ) def read_ape_indicators_list(self, request, level=None): @@ -3211,7 +3211,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): 'person_id': {'description': "Numéro du responsable légal ou de l'enfant"}, 'activity_id': {'description': "Numéro de l'activité"}, 'unit_id': {'description': "Numéro de l'unité"}, - 'place_id': {'description': "Numéro du lieu"}, + 'place_id': {'description': 'Numéro du lieu'}, 'NameID': {'description': 'Publik NameID'}, 'family_id': {'description': 'Numéro de DUI'}, 'ref_date': {'description': 'Date du début du calcul'}, @@ -3367,7 +3367,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Ajouter au panier une inscription extra-scolaire ou loisir", + description='Ajouter au panier une inscription extra-scolaire ou loisir', name='add-person-basket-subscription', post={ 'request_body': { @@ -3440,7 +3440,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Ajouter une inscription extra-scolaire 
ou loisir", + description='Ajouter une inscription extra-scolaire ou loisir', name='add-person-subscription', post={ 'request_body': { @@ -3677,7 +3677,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Obtenir les paniers de la famille", + description='Obtenir les paniers de la famille', name='get-baskets', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -3697,7 +3697,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Prolonger la durée de vie du panier", + description='Prolonger la durée de vie du panier', name='update-basket-time', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -3714,7 +3714,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Supprimer une ligne du panier", + description='Supprimer une ligne du panier', name='delete-basket-line', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -3742,7 +3742,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Supprimer le panier de la famille", + description='Supprimer le panier de la famille', name='delete-basket', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -3766,7 +3766,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Valider le panier de la famille", + description='Valider le panier de la famille', name='validate-basket', parameters={ 'NameID': {'description': 'Publik NameID'}, @@ -3803,7 +3803,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Lister les crèches", + description='Lister les crèches', name='read-nursery-list', parameters={ 'activity_type': {'description': "Type de l'activité.", 'example_value': 'CRECHCO'}, @@ -3831,7 +3831,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Obtenir un geojson avec la liste des crèches", + description='Obtenir un geojson avec la liste des crèches', name='get-nursery-geojson', parameters={ 'activity_type': {'description': "Type de l'activité.", 'example_value': 'CRECHCO'}, @@ -3879,7 +3879,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Inscriptions', - description="Créer une demande de place en crèche pour un enfant", + description='Créer une demande de place en crèche pour un enfant', name='create-nursery-demand', post={'request_body': {'schema': {'application/json': family_schemas.NURSERY_DEMAND_SCHEMA}}}, ) @@ -3957,7 +3957,7 @@ class ToulouseMaelis(BaseResource, HTTPResource): @endpoint( display_category='Facture', - description="Lister les régies", + description='Lister les régies', name='read-regie-list', ) def read_regie_list(self, request): @@ -4444,7 +4444,7 @@ class Subscription(models.Model): if self.status() == 'removed': self.wcs_trigger_payload['err_desc'] = "Le panier n'a pas été validé" if self.status() == 'cancelled': - self.wcs_trigger_payload['err_desc'] = "La facture a été annulée" + self.wcs_trigger_payload['err_desc'] = 'La facture a été annulée' self.save() self.resource.add_job( 'trigger_subscription_job', diff --git a/passerelle/contrib/toulouse_maelis/tools/add_person_unit_basket.py b/passerelle/contrib/toulouse_maelis/tools/add_person_unit_basket.py index 8355d4f4..3b4203a6 100755 --- 
a/passerelle/contrib/toulouse_maelis/tools/add_person_unit_basket.py +++ b/passerelle/contrib/toulouse_maelis/tools/add_person_unit_basket.py @@ -31,7 +31,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/add_person_unit_subscribe.py b/passerelle/contrib/toulouse_maelis/tools/add_person_unit_subscribe.py index 365d2069..54638ce1 100755 --- a/passerelle/contrib/toulouse_maelis/tools/add_person_unit_subscribe.py +++ b/passerelle/contrib/toulouse_maelis/tools/add_person_unit_subscribe.py @@ -31,7 +31,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/add_supplied_document.py b/passerelle/contrib/toulouse_maelis/tools/add_supplied_document.py index d69bfe11..bbd7837e 100755 --- a/passerelle/contrib/toulouse_maelis/tools/add_supplied_document.py +++ b/passerelle/contrib/toulouse_maelis/tools/add_supplied_document.py @@ -40,7 +40,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/cancel_invoice_and_delete_subscribe_list.py b/passerelle/contrib/toulouse_maelis/tools/cancel_invoice_and_delete_subscribe_list.py index f3cfa07c..a0d86e94 100755 --- a/passerelle/contrib/toulouse_maelis/tools/cancel_invoice_and_delete_subscribe_list.py +++ b/passerelle/contrib/toulouse_maelis/tools/cancel_invoice_and_delete_subscribe_list.py @@ -15,7 +15,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/check_wsdl.py b/passerelle/contrib/toulouse_maelis/tools/check_wsdl.py index c3ac6239..61708691 100755 --- a/passerelle/contrib/toulouse_maelis/tools/check_wsdl.py +++ b/passerelle/contrib/toulouse_maelis/tools/check_wsdl.py @@ -66,7 +66,7 @@ def check(args): sys.exit(returncode) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/delete_basket.py b/passerelle/contrib/toulouse_maelis/tools/delete_basket.py index e63e928e..075e851a 100755 --- a/passerelle/contrib/toulouse_maelis/tools/delete_basket.py +++ b/passerelle/contrib/toulouse_maelis/tools/delete_basket.py @@ -18,7 +18,7 @@ def check(args): print(results) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') 
parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/delete_basket_line.py b/passerelle/contrib/toulouse_maelis/tools/delete_basket_line.py index 702707af..c9d8d2d8 100755 --- a/passerelle/contrib/toulouse_maelis/tools/delete_basket_line.py +++ b/passerelle/contrib/toulouse_maelis/tools/delete_basket_line.py @@ -17,7 +17,7 @@ def check(args): print(results) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/delete_subscribe_activity.py b/passerelle/contrib/toulouse_maelis/tools/delete_subscribe_activity.py index fbd5ba04..77f41b3b 100755 --- a/passerelle/contrib/toulouse_maelis/tools/delete_subscribe_activity.py +++ b/passerelle/contrib/toulouse_maelis/tools/delete_subscribe_activity.py @@ -13,7 +13,7 @@ def check(args): print(results) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/get_calendar_period_quantity.py b/passerelle/contrib/toulouse_maelis/tools/get_calendar_period_quantity.py index c3fb04d2..5d5a0f27 100755 --- a/passerelle/contrib/toulouse_maelis/tools/get_calendar_period_quantity.py +++ b/passerelle/contrib/toulouse_maelis/tools/get_calendar_period_quantity.py @@ -25,7 +25,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/get_child_subscribe_school_information.py b/passerelle/contrib/toulouse_maelis/tools/get_child_subscribe_school_information.py index 8b0f2f29..0c4e7ace 100755 --- a/passerelle/contrib/toulouse_maelis/tools/get_child_subscribe_school_information.py +++ b/passerelle/contrib/toulouse_maelis/tools/get_child_subscribe_school_information.py @@ -26,7 +26,7 @@ def check(args): print(results) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/get_family_basket.py b/passerelle/contrib/toulouse_maelis/tools/get_family_basket.py index 43e97459..f94041b5 100755 --- a/passerelle/contrib/toulouse_maelis/tools/get_family_basket.py +++ b/passerelle/contrib/toulouse_maelis/tools/get_family_basket.py @@ -19,7 +19,7 @@ def check(args): print(results) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/get_person_catalogue_activity.py b/passerelle/contrib/toulouse_maelis/tools/get_person_catalogue_activity.py index 9d41480a..432ba34f 100755 --- 
a/passerelle/contrib/toulouse_maelis/tools/get_person_catalogue_activity.py +++ b/passerelle/contrib/toulouse_maelis/tools/get_person_catalogue_activity.py @@ -82,7 +82,7 @@ def check(args): ) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/get_person_schedule_list.py b/passerelle/contrib/toulouse_maelis/tools/get_person_schedule_list.py index abe75fa4..e15d6e77 100755 --- a/passerelle/contrib/toulouse_maelis/tools/get_person_schedule_list.py +++ b/passerelle/contrib/toulouse_maelis/tools/get_person_schedule_list.py @@ -38,7 +38,7 @@ def check(args): print(day) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/get_person_unit_info.py b/passerelle/contrib/toulouse_maelis/tools/get_person_unit_info.py index f6357e27..5e7bf287 100755 --- a/passerelle/contrib/toulouse_maelis/tools/get_person_unit_info.py +++ b/passerelle/contrib/toulouse_maelis/tools/get_person_unit_info.py @@ -68,7 +68,7 @@ def check(args): print(' place: %s / %s' % (results['place']['idPlace'], results['place']['lib1'])) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/grep_activity_info.py b/passerelle/contrib/toulouse_maelis/tools/grep_activity_info.py index 54a3e60e..aa92ab95 100755 --- a/passerelle/contrib/toulouse_maelis/tools/grep_activity_info.py +++ b/passerelle/contrib/toulouse_maelis/tools/grep_activity_info.py @@ -62,7 +62,7 @@ def check(args): break -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_activity_list.py b/passerelle/contrib/toulouse_maelis/tools/read_activity_list.py index c1654d80..4e3478cd 100755 --- a/passerelle/contrib/toulouse_maelis/tools/read_activity_list.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_activity_list.py @@ -105,7 +105,7 @@ def check(args): pprint.pprint(data) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_activity_nature_type.py b/passerelle/contrib/toulouse_maelis/tools/read_activity_nature_type.py index f0e88da8..7624c840 100755 --- a/passerelle/contrib/toulouse_maelis/tools/read_activity_nature_type.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_activity_nature_type.py @@ -16,7 +16,7 @@ def check(args): print(' * %s: %s' % (a_type['code'], a_type['libelle'])) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() 
parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_family.py b/passerelle/contrib/toulouse_maelis/tools/read_family.py index 2e9c824b..12945c57 100755 --- a/passerelle/contrib/toulouse_maelis/tools/read_family.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_family.py @@ -22,7 +22,7 @@ def check(args): return result -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_invoices.py b/passerelle/contrib/toulouse_maelis/tools/read_invoices.py index 21d191fa..377a65fc 100755 --- a/passerelle/contrib/toulouse_maelis/tools/read_invoices.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_invoices.py @@ -24,7 +24,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_nursery_list.py b/passerelle/contrib/toulouse_maelis/tools/read_nursery_list.py index 5ae27d5a..5a76c724 100755 --- a/passerelle/contrib/toulouse_maelis/tools/read_nursery_list.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_nursery_list.py @@ -21,7 +21,7 @@ def check(args): print(json.dumps(serialize_object(results), cls=utils.DjangoJSONEncoder, indent=2)) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_referential_list.py b/passerelle/contrib/toulouse_maelis/tools/read_referential_list.py index 34822b1e..3694717e 100755 --- a/passerelle/contrib/toulouse_maelis/tools/read_referential_list.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_referential_list.py @@ -37,7 +37,7 @@ def check(args): assert len(results) > 1 -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_school_for_adress_and_level.py b/passerelle/contrib/toulouse_maelis/tools/read_school_for_adress_and_level.py index 069ef083..4893b720 100755 --- a/passerelle/contrib/toulouse_maelis/tools/read_school_for_adress_and_level.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_school_for_adress_and_level.py @@ -29,7 +29,7 @@ def check(args): print(results) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_school_for_child_and_level.py b/passerelle/contrib/toulouse_maelis/tools/read_school_for_child_and_level.py index f16957dd..370068af 100755 --- 
a/passerelle/contrib/toulouse_maelis/tools/read_school_for_child_and_level.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_school_for_child_and_level.py @@ -21,7 +21,7 @@ def check(args): print(results) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/read_supplied_document_validity.py b/passerelle/contrib/toulouse_maelis/tools/read_supplied_document_validity.py index e4d0e663..35bd8a7b 100755 --- a/passerelle/contrib/toulouse_maelis/tools/read_supplied_document_validity.py +++ b/passerelle/contrib/toulouse_maelis/tools/read_supplied_document_validity.py @@ -25,7 +25,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/update_basket_time.py b/passerelle/contrib/toulouse_maelis/tools/update_basket_time.py index dd5bd191..c50f4363 100755 --- a/passerelle/contrib/toulouse_maelis/tools/update_basket_time.py +++ b/passerelle/contrib/toulouse_maelis/tools/update_basket_time.py @@ -17,7 +17,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/update_person_schedule_list.py b/passerelle/contrib/toulouse_maelis/tools/update_person_schedule_list.py index f12c70b2..8d81c4a7 100755 --- a/passerelle/contrib/toulouse_maelis/tools/update_person_schedule_list.py +++ b/passerelle/contrib/toulouse_maelis/tools/update_person_schedule_list.py @@ -34,7 +34,7 @@ def check(args): print(res) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/update_person_unit_basket.py b/passerelle/contrib/toulouse_maelis/tools/update_person_unit_basket.py index 956bb09d..444fe1f5 100755 --- a/passerelle/contrib/toulouse_maelis/tools/update_person_unit_basket.py +++ b/passerelle/contrib/toulouse_maelis/tools/update_person_unit_basket.py @@ -18,7 +18,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/update_week_calendar.py b/passerelle/contrib/toulouse_maelis/tools/update_week_calendar.py index bf07490c..f40a7662 100755 --- a/passerelle/contrib/toulouse_maelis/tools/update_week_calendar.py +++ b/passerelle/contrib/toulouse_maelis/tools/update_week_calendar.py @@ -17,7 +17,7 @@ def check(args): client = utils.get_client(args.env, 'Activity') if len(args.week) != 7: - raise Exception("week must contain 7 letters (on per week day)") + raise 
Exception('week must contain 7 letters (one per week day)') week = [] for i, unit_letter in enumerate(args.week): if unit_letter in ['_', ' ']: @@ -42,7 +42,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_maelis/tools/validate_basket.py b/passerelle/contrib/toulouse_maelis/tools/validate_basket.py index 3ab8903c..10b2356a 100755 --- a/passerelle/contrib/toulouse_maelis/tools/validate_basket.py +++ b/passerelle/contrib/toulouse_maelis/tools/validate_basket.py @@ -17,7 +17,7 @@ def check(args): print(result) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors') parser.add_argument('--env', '-e', default='integ', help='dev, test, integ, prod') diff --git a/passerelle/contrib/toulouse_smart/schemas.py b/passerelle/contrib/toulouse_smart/schemas.py index c924d6c8..e8f87c4f 100644 --- a/passerelle/contrib/toulouse_smart/schemas.py +++ b/passerelle/contrib/toulouse_smart/schemas.py @@ -28,11 +28,11 @@ CREATE_SCHEMA = { 'type': 'object', 'properties': { 'slug': { - 'description': "slug du block de champs intervention", + 'description': 'slug du block de champs intervention', 'type': 'string', }, 'description': { - 'description': "Description de la demande", + 'description': 'Description de la demande', 'type': 'string', }, 'lat': { @@ -115,7 +115,7 @@ CREATE_SCHEMA = { 'type': 'string', }, 'form_step': { - 'description': "Étape du traitement de la demande", + 'description': 'Étape du traitement de la demande', 'type': 'string', 'default': 'initial', }, @@ -165,19 +165,19 @@ UPDATE_SCHEMA = { 'pattern': '[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}', }, 'type_retour': { - 'description': "Type du retour", + 'description': 'Type du retour', 'type': 'string', }, 'type_retour_cloture': { - 'description': "Type du retour de la clôture", + 'description': 'Type du retour de la clôture', 'type': 'string', }, 'libelle_cloture': { - 'description': "Libellé de la clôture", + 'description': 'Libellé de la clôture', 'type': 'string', }, 'commentaire_cloture': { - 'description': "Commentaire de la clôture", + 'description': 'Commentaire de la clôture', 'type': 'string', }, }, @@ -203,15 +203,15 @@ MEDIA_SCHEMA = { 'type': 'object', 'properties': { 'filename': { - 'description': "Nom du ficher", + 'description': 'Nom du fichier', 'type': 'string', }, 'content_type': { - 'description': "Type MIME", + 'description': 'Type MIME', 'type': 'string', }, 'content': { - 'description': "Contenu", + 'description': 'Contenu', 'type': 'string', }, }, diff --git a/passerelle/contrib/utils/axel.py b/passerelle/contrib/utils/axel.py index cb5b11c2..71a1e8c9 100644 --- a/passerelle/contrib/utils/axel.py +++ b/passerelle/contrib/utils/axel.py @@ -51,17 +51,17 @@ xml_date_format = '%d/%m/%Y' xml_datetime_format = '%d/%m/%Y %H:%M:%S' -def indent(tree, space=" ", level=0): +def indent(tree, space=' ', level=0): # backport from Lib/xml/etree/ElementTree.py python 3.9 if isinstance(tree, ET.ElementTree): tree = tree.getroot() if level < 0: - raise ValueError(f"Initial indentation level must be >= 0, got {level}") + raise ValueError(f'Initial indentation level must be >= 0, got {level}') if len(tree) == 0: return # Reduce the
memory consumption by reusing indentation strings. - indentations = ["\n" + level * space] + indentations = ['\n' + level * space] def _indent_children(elem, level): # Start a new indentation level for the first child. diff --git a/passerelle/sms/models.py b/passerelle/sms/models.py index e34a8dda..03c5dc8c 100644 --- a/passerelle/sms/models.py +++ b/passerelle/sms/models.py @@ -30,7 +30,7 @@ from passerelle.utils.jsonresponse import APIError SEND_SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', - "type": "object", + 'type': 'object', 'required': ['message', 'from', 'to'], 'properties': { 'message': { @@ -43,8 +43,8 @@ SEND_SCHEMA = { }, 'to': { 'description': 'Destination numbers', - "type": "array", - "items": {'type': 'string', 'pattern': r'^\+?[-.\s/\d]+$'}, + 'type': 'array', + 'items': {'type': 'string', 'pattern': r'^\+?[-.\s/\d]+$'}, }, }, } @@ -183,8 +183,8 @@ class SMSResource(BaseResource): authorized_numbers = sorted(set(destinations) - foreign_numbers - premium_numbers, key=int) - premium_numbers_string = ", ".join(sorted(premium_numbers, key=int)) - foreign_numbers_string = ", ".join(sorted(foreign_numbers - premium_numbers, key=int)) + premium_numbers_string = ', '.join(sorted(premium_numbers, key=int)) + foreign_numbers_string = ', '.join(sorted(foreign_numbers - premium_numbers, key=int)) if premium_numbers_string: logging.warning('unauthorized premium rate phone number: %s', premium_numbers_string) if foreign_numbers_string: diff --git a/passerelle/utils/__init__.py b/passerelle/utils/__init__.py index 187ee891..20aaa041 100644 --- a/passerelle/utils/__init__.py +++ b/passerelle/utils/__init__.py @@ -165,7 +165,7 @@ def protected_api(perm): @wraps(view_func) def _wrapped_view(instance, request, *args, **kwargs): if not isinstance(instance, SingleObjectMixin): - raise Exception("protected_api must be applied on a method of a class based view") + raise Exception('protected_api must be applied on a method of a class based view') obj = instance.get_object() if not is_authorized(request, obj, perm): raise PermissionDenied() diff --git a/passerelle/utils/conversion.py b/passerelle/utils/conversion.py index 9859bd9f..926a88d8 100644 --- a/passerelle/utils/conversion.py +++ b/passerelle/utils/conversion.py @@ -59,7 +59,7 @@ def char_filter(string): def clean_string(string): - return "".join(char_filter(string)) + return ''.join(char_filter(string)) def ensure_encoding(s, encoding): diff --git a/passerelle/utils/db.py b/passerelle/utils/db.py index 1533f99e..72ebea6f 100644 --- a/passerelle/utils/db.py +++ b/passerelle/utils/db.py @@ -43,4 +43,4 @@ class EnsureJsonbType(Operation): pass def describe(self): - return "Migrate to postgres jsonb type" + return 'Migrate to postgres jsonb type' diff --git a/passerelle/utils/jsonresponse.py b/passerelle/utils/jsonresponse.py index 78252700..f2ac53fc 100644 --- a/passerelle/utils/jsonresponse.py +++ b/passerelle/utils/jsonresponse.py @@ -55,7 +55,7 @@ class to_json: @functools.wraps(f) def wrapper(*args, **kwargs): method = self.api_func - if args and getattr(getattr(args[0], f.__name__, None), "__self__", False): + if args and getattr(getattr(args[0], f.__name__, None), '__self__', False): method = self.api_method return method(f, *args, **kwargs) @@ -67,23 +67,23 @@ class to_json: return obj def err_to_response(self, err): - if hasattr(err, "__module__"): - err_module = err.__module__ + "." + if hasattr(err, '__module__'): + err_module = err.__module__ + '.' 
else: - err_module = "" + err_module = '' - if hasattr(err, "owner"): - err_module += err.owner.__name__ + "." + if hasattr(err, 'owner'): + err_module += err.owner.__name__ + '.' err_class = err_module + err.__class__.__name__ err_desc = force_str(err) response = { - "err": 1, - "err_class": err_class, - "err_desc": err_desc, - "data": getattr(err, 'data', None), + 'err': 1, + 'err_class': err_class, + 'err_desc': err_desc, + 'data': getattr(err, 'data', None), } if getattr(err, 'extra_dict', None): response.update(err.extra_dict) @@ -100,19 +100,19 @@ class to_json: jsonp_cb = req.GET.get('callback') or req.GET.get('jsonpCallback') or CALLBACK_NAME if not re.match(r'^[$a-zA-Z_][a-zA-Z0-9_]*$', jsonp_cb): return HttpResponseBadRequest('invalid JSONP callback name') - content_type = "application/json" + content_type = 'application/json' kwargs = dict(self.kwargs) if debug: - kwargs["indent"] = 4 - kwargs["ensure_ascii"] = False + kwargs['indent'] = 4 + kwargs['ensure_ascii'] = False plain = json.dumps(data, **kwargs) if format == 'jsonp': - plain = "%s(%s);" % (jsonp_cb, plain) - content_type = "application/javascript" + plain = '%s(%s);' % (jsonp_cb, plain) + content_type = 'application/javascript' - return HttpResponse(plain, content_type="%s; charset=UTF-8" % content_type, status=status) + return HttpResponse(plain, content_type='%s; charset=UTF-8' % content_type, status=status) def api_func(self, f, *args, **kwargs): return self.api(f, args[0], *args, **kwargs) @@ -158,9 +158,9 @@ class to_json: # Http404 is for silent object not found exceptions pass elif getattr(e, 'log_error', True): - logger.exception("Error occurred while processing request", extra=extras) + logger.exception('Error occurred while processing request', extra=extras) else: - logger.warning("Error occurred while processing request", extra=extras) + logger.warning('Error occurred while processing request', extra=extras) try: really_raise = int(req.GET.get('raise') or 0) diff --git a/passerelle/utils/paginator.py b/passerelle/utils/paginator.py index 097638eb..43ce6de8 100644 --- a/passerelle/utils/paginator.py +++ b/passerelle/utils/paginator.py @@ -15,7 +15,7 @@ class InfinitePaginator(Paginator): if number == 1 and self.allow_empty_first_page: pass else: - raise EmptyPage(_("That page contains no results")) + raise EmptyPage(_('That page contains no results')) has_next = len(window_items) > len(page_items) return InfinitePage(page_items, number, self, has_next) diff --git a/passerelle/views.py b/passerelle/views.py index 4101ffff..1efad8e8 100644 --- a/passerelle/views.py +++ b/passerelle/views.py @@ -413,7 +413,7 @@ class GenericEndpointView(GenericConnectorMixin, SingleObjectMixin, View): try: data = json.loads(request.body) except ValueError as e: - raise APIError("could not decode body to json: %s" % e, http_status=400) + raise APIError('could not decode body to json: %s' % e, http_status=400) if must_unflatten: data = unflatten(data) if merge_extra and hasattr(data, 'items'): diff --git a/passerelle/wsgi.py b/passerelle/wsgi.py index 6710add0..dbc873e1 100644 --- a/passerelle/wsgi.py +++ b/passerelle/wsgi.py @@ -17,7 +17,7 @@ import os from django.core.wsgi import get_wsgi_application -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passerelle.settings") +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'passerelle.settings') # This application object is used by any WSGI server configured to use this # file. 
This includes Django's development server, if the WSGI_APPLICATION diff --git a/setup.py b/setup.py index a2f5d75e..8793da4d 100755 --- a/setup.py +++ b/setup.py @@ -133,7 +133,7 @@ setup( url='https://dev.entrouvert.org/projects/passerelle/', download_url='http://repos.entrouvert.org/passerelle.git/', author="Entr'ouvert", - author_email="info@entrouvert.com", + author_email='info@entrouvert.com', packages=find_packages(os.path.dirname(__file__) or '.'), scripts=['manage.py'], include_package_data=True, diff --git a/tests/conftest.py b/tests/conftest.py index a45e6e32..829f581e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -34,27 +34,27 @@ def api_adresse_data_gouv_fr_search(url, request): return response( 200, { - "limit": 1, - "attribution": "BAN", - "version": "draft", - "licence": "ODbL 1.0", - "query": "plop", - "type": "FeatureCollection", - "features": [ + 'limit': 1, + 'attribution': 'BAN', + 'version': 'draft', + 'licence': 'ODbL 1.0', + 'query': 'plop', + 'type': 'FeatureCollection', + 'features': [ { - "geometry": {"type": "Point", "coordinates": [-0.593775, 47.474633]}, - "properties": { - "citycode": "49007", - "name": "Rue Roger Halope", - "id": "49007_6950_be54bd", - "city": "Angers", - "context": "49, Maine-et-Loire, Pays de la Loire", - "score": 0.14097272727272728, - "label": "Rue Roger Halope 49000 Angers", - "postcode": "49000", - "type": "street", + 'geometry': {'type': 'Point', 'coordinates': [-0.593775, 47.474633]}, + 'properties': { + 'citycode': '49007', + 'name': 'Rue Roger Halope', + 'id': '49007_6950_be54bd', + 'city': 'Angers', + 'context': '49, Maine-et-Loire, Pays de la Loire', + 'score': 0.14097272727272728, + 'label': 'Rue Roger Halope 49000 Angers', + 'postcode': '49000', + 'type': 'street', }, - "type": "Feature", + 'type': 'Feature', } ], }, @@ -68,27 +68,27 @@ def api_adresse_data_gouv_fr_reverse(url, request): return response( 200, { - "limit": 1, - "attribution": "BAN", - "version": "draft", - "licence": "ODbL 1.0", - "type": "FeatureCollection", - "features": [ + 'limit': 1, + 'attribution': 'BAN', + 'version': 'draft', + 'licence': 'ODbL 1.0', + 'type': 'FeatureCollection', + 'features': [ { - "geometry": {"type": "Point", "coordinates": [-0.593775, 47.474633]}, - "properties": { - "citycode": "49007", - "name": "Rue Roger Halope", - "id": "49007_6950_be54bd", - "city": "Angers", - "distance": 0, - "context": "49, Maine-et-Loire, Pays de la Loire", - "score": 1.0, - "label": "Rue Roger Halope 49000 Angers", - "postcode": "49000", - "type": "street", + 'geometry': {'type': 'Point', 'coordinates': [-0.593775, 47.474633]}, + 'properties': { + 'citycode': '49007', + 'name': 'Rue Roger Halope', + 'id': '49007_6950_be54bd', + 'city': 'Angers', + 'distance': 0, + 'context': '49, Maine-et-Loire, Pays de la Loire', + 'score': 1.0, + 'label': 'Rue Roger Halope 49000 Angers', + 'postcode': '49000', + 'type': 'street', }, - "type": "Feature", + 'type': 'Feature', } ], }, diff --git a/tests/ldap/test_search_endpoint.py b/tests/ldap/test_search_endpoint.py index f3f3f1da..3e0b8663 100644 --- a/tests/ldap/test_search_endpoint.py +++ b/tests/ldap/test_search_endpoint.py @@ -405,7 +405,7 @@ def test_bad_requests(app, resource, ldap_server): response = app.get( '/ldap/resource/search', params={ - 'filter': "(cn~=Jane Doe)", + 'filter': '(cn~=Jane Doe)', 'ldap_base_dn': 'o=orga', 'id_attribute': 'uid', }, @@ -418,7 +418,7 @@ def test_bad_requests(app, resource, ldap_server): response = app.get( '/ldap/resource/search', params={ - 'filter': "(cn~=Jane 
Doe)", + 'filter': '(cn~=Jane Doe)', 'ldap_base_dn': 'o=orga', 'id_attribute': 'uid', 'search_attribute': 'bloqué', @@ -432,7 +432,7 @@ def test_bad_requests(app, resource, ldap_server): response = app.get( '/ldap/resource/search', params={ - 'filter': "(cn~=Jane Doe)", + 'filter': '(cn~=Jane Doe)', 'ldap_base_dn': 'o=orga', 'id_attribute': 'bloqué', 'search_attribute': 'cn', @@ -446,7 +446,7 @@ def test_bad_requests(app, resource, ldap_server): response = app.get( '/ldap/resource/search', params={ - 'filter': "(cn~=Jane Doe)", + 'filter': '(cn~=Jane Doe)', 'ldap_base_dn': 'o=orga', 'id_attribute': 'uid', 'search_attribute': 'cn', diff --git a/tests/settings.py b/tests/settings.py index 38abb4b8..e3a2a27f 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -65,7 +65,7 @@ DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'TEST': { - 'NAME': ('passerelle-test-%s' % os.environ.get("RAND_TEST", "")), + 'NAME': ('passerelle-test-%s' % os.environ.get('RAND_TEST', '')), }, } } @@ -74,4 +74,4 @@ LOGGED_REQUESTS_MAX_SIZE = 4999 LEGACY_URLS_MAPPING = {'old.org': 'new.org'} -PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] +PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher'] diff --git a/tests/test_actesweb.py b/tests/test_actesweb.py index 7900ce34..a2902212 100644 --- a/tests/test_actesweb.py +++ b/tests/test_actesweb.py @@ -73,27 +73,27 @@ def test_demand_creation(app, payload, actesweb): assert_file_content_values( demfile, dict( # noqa pylint: disable=use-dict-literal - DEMANDEUR_CIVILITE="Madame", + DEMANDEUR_CIVILITE='Madame', DEMANDEUR_NOM_USAGE="W'hatever?", - DEMANDEUR_PRENOMS="Kim Chelsea", + DEMANDEUR_PRENOMS='Kim Chelsea', DEMANDEUR_ADRESSE1="37 Rue de l'Aigle Blanc", - DEMANDEUR_VILLE="Nancy", - DEMANDEUR_CP="54000", - DEMANDEUR_PAYS="France", - DEMANDEUR_TEL="+33 6 55 44 22 11", - DEMANDEUR_ADR="chelsea@whatever.com", - DEMANDE_NOM="Whatever", - DEMANDE_PRENOMS="Kevin", - DEMANDE_DATE_EVENEMENT="20120714", - TYPE_DEMANDE="Copie Integrale", - ACTE="NA", - NB="1", - LIEU_EVENEMENT="Nancy", - PERE_NOM="Smith", - PERE_PRENOMS="John Oliver", - MERE_NOM="Smith", - MERE_PRENOM="Kim", - DEMANDE_SEXE="m", + DEMANDEUR_VILLE='Nancy', + DEMANDEUR_CP='54000', + DEMANDEUR_PAYS='France', + DEMANDEUR_TEL='+33 6 55 44 22 11', + DEMANDEUR_ADR='chelsea@whatever.com', + DEMANDE_NOM='Whatever', + DEMANDE_PRENOMS='Kevin', + DEMANDE_DATE_EVENEMENT='20120714', + TYPE_DEMANDE='Copie Integrale', + ACTE='NA', + NB='1', + LIEU_EVENEMENT='Nancy', + PERE_NOM='Smith', + PERE_PRENOMS='John Oliver', + MERE_NOM='Smith', + MERE_PRENOM='Kim', + DEMANDE_SEXE='m', ), ) elif 'mariage' in payload: @@ -103,34 +103,34 @@ def test_demand_creation(app, payload, actesweb): assert_file_content_values( demfile, dict( # noqa pylint: disable=use-dict-literal - DEMANDEUR_CIVILITE="Madame", - DEMANDEUR_NOM_USAGE="Whatever", - DEMANDEUR_NOM="Bar", - DEMANDEUR_PRENOMS="Zoé", - DEMANDEUR_ADRESSE1="169, rue du Château", - DEMANDEUR_VILLE="Nancy", - DEMANDEUR_CP="54001", - DEMANDEUR_PAYS="France", - DEMANDEUR_TEL="+33 6 55 44 22 11", - DEMANDEUR_ADR="chelsea@whatever.com", - DEMANDE_NOM="Whatever", - DEMANDE_PRENOMS="Kevin", - DEMANDE_DATE_EVENEMENT="20120714", - TYPE_DEMANDE="Copie Integrale", - ACTE="MA", - NB="1", - LIEU_EVENEMENT="Nancy", - PERE_NOM="Smith", - PERE_PRENOMS="John Oliver", - MERE_NOM="Smith", - MERE_PRENOM="Kim", - DEMANDE_SEXE="m", - CONJOINT_NOM="Contrao", - CONJOIN_PRENOMS="Chelsea", - CONJOINT_PERE_NOM="Scaramucci", - CONJOINT_PERE_PRENOMS="Antonio", - 
CONJOINT_MERE_NOM="Scaramucci", - CONJOINT_MERE_PRENOMS="Marguerite", + DEMANDEUR_CIVILITE='Madame', + DEMANDEUR_NOM_USAGE='Whatever', + DEMANDEUR_NOM='Bar', + DEMANDEUR_PRENOMS='Zoé', + DEMANDEUR_ADRESSE1='169, rue du Château', + DEMANDEUR_VILLE='Nancy', + DEMANDEUR_CP='54001', + DEMANDEUR_PAYS='France', + DEMANDEUR_TEL='+33 6 55 44 22 11', + DEMANDEUR_ADR='chelsea@whatever.com', + DEMANDE_NOM='Whatever', + DEMANDE_PRENOMS='Kevin', + DEMANDE_DATE_EVENEMENT='20120714', + TYPE_DEMANDE='Copie Integrale', + ACTE='MA', + NB='1', + LIEU_EVENEMENT='Nancy', + PERE_NOM='Smith', + PERE_PRENOMS='John Oliver', + MERE_NOM='Smith', + MERE_PRENOM='Kim', + DEMANDE_SEXE='m', + CONJOINT_NOM='Contrao', + CONJOIN_PRENOMS='Chelsea', + CONJOINT_PERE_NOM='Scaramucci', + CONJOINT_PERE_PRENOMS='Antonio', + CONJOINT_MERE_NOM='Scaramucci', + CONJOINT_MERE_PRENOMS='Marguerite', ), ) else: @@ -147,22 +147,22 @@ def test_demand_creation(app, payload, actesweb): assert_file_content_values( demfile, dict( # noqa pylint: disable=use-dict-literal - DEMANDEUR_CIVILITE="Madame", - DEMANDEUR_NOM_USAGE="Whatever", - DEMANDEUR_PRENOMS="Kim Chelsea", + DEMANDEUR_CIVILITE='Madame', + DEMANDEUR_NOM_USAGE='Whatever', + DEMANDEUR_PRENOMS='Kim Chelsea', DEMANDEUR_ADRESSE1="37 Rue de l'Aigle Blanc", - DEMANDEUR_VILLE="Nancy", - DEMANDEUR_CP="54000", - DEMANDEUR_PAYS="France", - DEMANDEUR_TEL="+33 6 55 44 22 11", - DEMANDEUR_ADR="chelsea@whatever.com", - DEMANDE_NOM="Whatever", - DEMANDE_PRENOMS="Kevin", - DEMANDE_DATE_EVENEMENT="20120714", - TYPE_DEMANDE="Extrait sans filiation", - ACTE="DE", - NB="1", - LIEU_EVENEMENT="Nancy", - DEMANDE_SEXE="m", + DEMANDEUR_VILLE='Nancy', + DEMANDEUR_CP='54000', + DEMANDEUR_PAYS='France', + DEMANDEUR_TEL='+33 6 55 44 22 11', + DEMANDEUR_ADR='chelsea@whatever.com', + DEMANDE_NOM='Whatever', + DEMANDE_PRENOMS='Kevin', + DEMANDE_DATE_EVENEMENT='20120714', + TYPE_DEMANDE='Extrait sans filiation', + ACTE='DE', + NB='1', + LIEU_EVENEMENT='Nancy', + DEMANDE_SEXE='m', ), ) diff --git a/tests/test_address.py b/tests/test_address.py index 393126f6..a1db6261 100644 --- a/tests/test_address.py +++ b/tests/test_address.py @@ -27,184 +27,184 @@ from passerelle.apps.sector.models import SectorResource from tests.test_manager import login BAN = { - "attribution": "BAN", - "features": [ + 'attribution': 'BAN', + 'features': [ { - "geometry": {"coordinates": [2.323365, 48.833702], "type": "Point"}, - "properties": { - "city": "Paris", - "citycode": "75114", - "context": "75, Paris, Île-de-France", - "district": "Paris 14e Arrondissement", - "housenumber": "169", - "id": "75114_1912_00169", - "importance": 0.77751, - "label": "169 Rue du Château 75014 Paris", - "name": "169 Rue du Château", - "postcode": "75014", - "score": 0.9797736363636363, - "street": "Rue du Château", - "type": "housenumber", - "x": 650331.55, - "y": 6859506.98, + 'geometry': {'coordinates': [2.323365, 48.833702], 'type': 'Point'}, + 'properties': { + 'city': 'Paris', + 'citycode': '75114', + 'context': '75, Paris, Île-de-France', + 'district': 'Paris 14e Arrondissement', + 'housenumber': '169', + 'id': '75114_1912_00169', + 'importance': 0.77751, + 'label': '169 Rue du Château 75014 Paris', + 'name': '169 Rue du Château', + 'postcode': '75014', + 'score': 0.9797736363636363, + 'street': 'Rue du Château', + 'type': 'housenumber', + 'x': 650331.55, + 'y': 6859506.98, }, - "type": "Feature", + 'type': 'Feature', }, { - "geometry": {"coordinates": [2.323365, 48.833702], "type": "Point"}, - "properties": { - "city": "Paris", - "citycode": "75114", - 
"context": "75, Paris, Île-de-France", - "district": "Paris 14e Arrondissement", - "housenumber": "167", - "id": "75114_1912_00167", - "importance": 0.77751, - "label": "167 Rue du Château 75014 Paris", - "name": "167 Rue du Château", - "postcode": "75014", - "score": 0.9797736363636363, - "street": "Rue du Château", - "type": "housenumber", - "x": 650331.55, - "y": 6859506.98, + 'geometry': {'coordinates': [2.323365, 48.833702], 'type': 'Point'}, + 'properties': { + 'city': 'Paris', + 'citycode': '75114', + 'context': '75, Paris, Île-de-France', + 'district': 'Paris 14e Arrondissement', + 'housenumber': '167', + 'id': '75114_1912_00167', + 'importance': 0.77751, + 'label': '167 Rue du Château 75014 Paris', + 'name': '167 Rue du Château', + 'postcode': '75014', + 'score': 0.9797736363636363, + 'street': 'Rue du Château', + 'type': 'housenumber', + 'x': 650331.55, + 'y': 6859506.98, }, - "type": "Feature", + 'type': 'Feature', }, { - "geometry": {"coordinates": [2.323365, 48.833702], "type": "Point"}, - "properties": { - "city": "Paris", - "citycode": "75114", - "context": "75, Paris, Île-de-France", - "district": "Paris 14e Arrondissement", - "housenumber": "170", - "id": "75114_1912_00169", - "importance": 0.77751, - "label": "170 Rue du Château 75014 Paris", - "name": "170 Rue du Château", - "postcode": "75014", - "score": 0.9797736363636363, - "street": "Rue du Château", - "type": "housenumber", - "x": 650331.55, - "y": 6859506.98, + 'geometry': {'coordinates': [2.323365, 48.833702], 'type': 'Point'}, + 'properties': { + 'city': 'Paris', + 'citycode': '75114', + 'context': '75, Paris, Île-de-France', + 'district': 'Paris 14e Arrondissement', + 'housenumber': '170', + 'id': '75114_1912_00169', + 'importance': 0.77751, + 'label': '170 Rue du Château 75014 Paris', + 'name': '170 Rue du Château', + 'postcode': '75014', + 'score': 0.9797736363636363, + 'street': 'Rue du Château', + 'type': 'housenumber', + 'x': 650331.55, + 'y': 6859506.98, }, - "type": "Feature", + 'type': 'Feature', }, # no house number { - "geometry": {"coordinates": [2.323365, 48.833702], "type": "Point"}, - "properties": { - "city": "Paris", - "citycode": "75114", - "context": "75, Paris, Île-de-France", - "district": "Paris 14e Arrondissement", - "id": "75114_1912_00169", - "importance": 0.77751, - "label": "XX Rue du Château 75014 Paris", - "name": "Rue du Château", - "postcode": "75014", - "score": 0.9797736363636363, - "street": "Rue du Château", - "type": "housenumber", - "x": 650331.55, - "y": 6859506.98, + 'geometry': {'coordinates': [2.323365, 48.833702], 'type': 'Point'}, + 'properties': { + 'city': 'Paris', + 'citycode': '75114', + 'context': '75, Paris, Île-de-France', + 'district': 'Paris 14e Arrondissement', + 'id': '75114_1912_00169', + 'importance': 0.77751, + 'label': 'XX Rue du Château 75014 Paris', + 'name': 'Rue du Château', + 'postcode': '75014', + 'score': 0.9797736363636363, + 'street': 'Rue du Château', + 'type': 'housenumber', + 'x': 650331.55, + 'y': 6859506.98, }, - "type": "Feature", + 'type': 'Feature', }, { - "geometry": {"coordinates": [2.323365, 48.833702], "type": "Point"}, - "properties": { - "city": "Paris", - "citycode": "75114", - "context": "75, Paris, Île-de-France", - "district": "Paris 14e Arrondissement", - "id": "75114_1913_00000", - "importance": 0.77751, - "label": "YY Rue du Château 75014 Paris", - "name": "Rue du Château", - "postcode": "75014", - "score": 0.9797736363636363, - "street": "Rue du Château", - "type": "housenumber", - "x": 650331.55, - "y": 6859506.98, + 
'geometry': {'coordinates': [2.323365, 48.833702], 'type': 'Point'}, + 'properties': { + 'city': 'Paris', + 'citycode': '75114', + 'context': '75, Paris, Île-de-France', + 'district': 'Paris 14e Arrondissement', + 'id': '75114_1913_00000', + 'importance': 0.77751, + 'label': 'YY Rue du Château 75014 Paris', + 'name': 'Rue du Château', + 'postcode': '75014', + 'score': 0.9797736363636363, + 'street': 'Rue du Château', + 'type': 'housenumber', + 'x': 650331.55, + 'y': 6859506.98, }, - "type": "Feature", + 'type': 'Feature', }, # empty id (no street_id) { - "geometry": {"coordinates": [2.323365, 48.833702], "type": "Point"}, - "properties": { - "city": "Paris", - "citycode": "75114", - "context": "75, Paris, Île-de-France", - "district": "Paris 14e Arrondissement", - "housenumber": "169", - "id": "", # empty id => no street_id - "importance": 0.77751, - "label": "169 Rue du Château 75014 Paris", - "name": "169 Rue du Château", - "postcode": "75014", - "score": 0.9797736363636363, - "street": "Rue du Château", - "type": "housenumber", - "x": 650331.55, - "y": 6859506.98, + 'geometry': {'coordinates': [2.323365, 48.833702], 'type': 'Point'}, + 'properties': { + 'city': 'Paris', + 'citycode': '75114', + 'context': '75, Paris, Île-de-France', + 'district': 'Paris 14e Arrondissement', + 'housenumber': '169', + 'id': '', # empty id => no street_id + 'importance': 0.77751, + 'label': '169 Rue du Château 75014 Paris', + 'name': '169 Rue du Château', + 'postcode': '75014', + 'score': 0.9797736363636363, + 'street': 'Rue du Château', + 'type': 'housenumber', + 'x': 650331.55, + 'y': 6859506.98, }, - "type": "Feature", + 'type': 'Feature', }, # id is not a string => no street_id { - "geometry": {"coordinates": [2.323365, 48.833702], "type": "Point"}, - "properties": { - "city": "Paris", - "citycode": "75114", - "context": "75, Paris, Île-de-France", - "district": "Paris 14e Arrondissement", - "housenumber": "169", - "id": 42, # integer id => no street_id - "importance": 0.77751, - "label": "169 Rue du Château 75014 Paris", - "name": "169 Rue du Château", - "postcode": "75014", - "score": 0.9797736363636363, - "street": "Rue du Château", - "type": "housenumber", - "x": 650331.55, - "y": 6859506.98, + 'geometry': {'coordinates': [2.323365, 48.833702], 'type': 'Point'}, + 'properties': { + 'city': 'Paris', + 'citycode': '75114', + 'context': '75, Paris, Île-de-France', + 'district': 'Paris 14e Arrondissement', + 'housenumber': '169', + 'id': 42, # integer id => no street_id + 'importance': 0.77751, + 'label': '169 Rue du Château 75014 Paris', + 'name': '169 Rue du Château', + 'postcode': '75014', + 'score': 0.9797736363636363, + 'street': 'Rue du Château', + 'type': 'housenumber', + 'x': 650331.55, + 'y': 6859506.98, }, - "type": "Feature", + 'type': 'Feature', }, # id without "_" => no street_id { - "geometry": {"coordinates": [2.323365, 48.833702], "type": "Point"}, - "properties": { - "city": "Paris", - "citycode": "75114", - "context": "75, Paris, Île-de-France", - "district": "Paris 14e Arrondissement", - "housenumber": "169", - "id": "123", # no "_" => no street_id - "importance": 0.77751, - "label": "169 Rue du Château 75014 Paris", - "name": "169 Rue du Château", - "postcode": "75014", - "score": 0.9797736363636363, - "street": "Rue du Château", - "type": "housenumber", - "x": 650331.55, - "y": 6859506.98, + 'geometry': {'coordinates': [2.323365, 48.833702], 'type': 'Point'}, + 'properties': { + 'city': 'Paris', + 'citycode': '75114', + 'context': '75, Paris, Île-de-France', + 'district': 'Paris 
14e Arrondissement', + 'housenumber': '169', + 'id': '123', # no "_" => no street_id + 'importance': 0.77751, + 'label': '169 Rue du Château 75014 Paris', + 'name': '169 Rue du Château', + 'postcode': '75014', + 'score': 0.9797736363636363, + 'street': 'Rue du Château', + 'type': 'housenumber', + 'x': 650331.55, + 'y': 6859506.98, }, - "type": "Feature", + 'type': 'Feature', }, ], - "licence": "ETALAB-2.0", - "limit": 5, - "query": "169 rue du chateau, 75014 Paris", - "type": "FeatureCollection", - "version": "draft", + 'licence': 'ETALAB-2.0', + 'limit': 5, + 'query': '169 rue du chateau, 75014 Paris', + 'type': 'FeatureCollection', + 'version': 'draft', } CSV = """street_id,street_name,parity,min_housenumber,max_housenumber,sector_id,sector_name diff --git a/tests/test_adullact_pastell.py b/tests/test_adullact_pastell.py index fe13cade..d883d414 100644 --- a/tests/test_adullact_pastell.py +++ b/tests/test_adullact_pastell.py @@ -44,7 +44,7 @@ def test_auth_headers(app, setup): responses.add( responses.GET, 'http://example.com/api/v2/version', - json={"version": "3.0.4", "revision": 54322}, + json={'version': '3.0.4', 'revision': 54322}, status=200, ) setup.check_status() @@ -76,12 +76,12 @@ def test_list_entities(app, setup): 'http://example.com/api/v2/entite', json=[ { - "id_e": "7", - "denomination": "Publik", - "siren": "198307662", - "type": "collectivite", - "centre_de_gestion": "0", - "entite_mere": "1", + 'id_e': '7', + 'denomination': 'Publik', + 'siren': '198307662', + 'type': 'collectivite', + 'centre_de_gestion': '0', + 'entite_mere': '1', } ], status=200, @@ -105,14 +105,14 @@ def test_list_documents(app, setup): 'http://example.com/api/v2/entite/7/document', json=[ { - "id_d": "MNYDNCa", - "id_e": "7", - "type": "publik", - "titre": "TestConnecteur", - "creation": "2023-06-27 18:02:04", - "modification": "2023-06-27 18:02:28", - "denomination": "Publik", - "entite_base": "Publik", + 'id_d': 'MNYDNCa', + 'id_e': '7', + 'type': 'publik', + 'titre': 'TestConnecteur', + 'creation': '2023-06-27 18:02:04', + 'modification': '2023-06-27 18:02:28', + 'denomination': 'Publik', + 'entite_base': 'Publik', } ], status=200, @@ -134,19 +134,19 @@ def test_get_document_details(app, setup): responses.GET, 'http://example.com/api/v2/entite/7/document/MNYDNCa', json={ - "info": { - "id_d": "MNYDNCa", - "type": "publik", - "titre": "TestConnecteur", - "creation": "2023-06-27 18:02:04", - "modification": "2023-06-27 18:02:28", + 'info': { + 'id_d': 'MNYDNCa', + 'type': 'publik', + 'titre': 'TestConnecteur', + 'creation': '2023-06-27 18:02:04', + 'modification': '2023-06-27 18:02:28', }, - "data": { - "envoi_signature": "checked", - "envoi_iparapheur": "1", - "nom_dossier": "TestConnecteur", - "iparapheur_type": "Type Publik", - "iparapheur_sous_type": "SPublik", + 'data': { + 'envoi_signature': 'checked', + 'envoi_iparapheur': '1', + 'nom_dossier': 'TestConnecteur', + 'iparapheur_type': 'Type Publik', + 'iparapheur_sous_type': 'SPublik', }, }, status=200, @@ -168,22 +168,22 @@ def test_create_document(app, setup): responses.POST, 'http://example.com/api/v2/entite/7/document', json={ - "info": { - "id_d": "67WaYzM", - "type": "publik", - "titre": "", - "creation": "2023-06-27 17:25:54", - "modification": "2023-06-27 17:25:54", + 'info': { + 'id_d': '67WaYzM', + 'type': 'publik', + 'titre': '', + 'creation': '2023-06-27 17:25:54', + 'modification': '2023-06-27 17:25:54', }, - "data": {"envoi_signature": "checked", "envoi_iparapheur": "1"}, - "action_possible": ["modification", "supression"], - 
"action-possible": ["modification", "supression"], - "last_action": { - "action": "creation", - "message": "Cr\u00e9ation du document", - "date": "2023-06-27 17:25:54", + 'data': {'envoi_signature': 'checked', 'envoi_iparapheur': '1'}, + 'action_possible': ['modification', 'supression'], + 'action-possible': ['modification', 'supression'], + 'last_action': { + 'action': 'creation', + 'message': 'Cr\u00e9ation du document', + 'date': '2023-06-27 17:25:54', }, - "id_d": "67WaYzM", + 'id_d': '67WaYzM', }, status=200, ) @@ -191,25 +191,25 @@ def test_create_document(app, setup): responses.PATCH, 'http://example.com/api/v2/entite/7/document/67WaYzM', json={ - "content": { - "info": { - "id_d": "MNYDNCa", - "type": "publik", - "titre": "TestConnecteur", - "creation": "2023-06-27 18:02:04", - "modification": "2023-06-27 18:02:28", + 'content': { + 'info': { + 'id_d': 'MNYDNCa', + 'type': 'publik', + 'titre': 'TestConnecteur', + 'creation': '2023-06-27 18:02:04', + 'modification': '2023-06-27 18:02:28', }, - "data": { - "envoi_signature": "checked", - "envoi_iparapheur": "1", - "nom_dossier": "TestConnecteu", - "iparapheur_type": "Type Publik", - "iparapheur_sous_type": "SPublik", + 'data': { + 'envoi_signature': 'checked', + 'envoi_iparapheur': '1', + 'nom_dossier': 'TestConnecteu', + 'iparapheur_type': 'Type Publik', + 'iparapheur_sous_type': 'SPublik', }, }, - "result": "ok", - "formulaire_ok": 0, - "message": "Le formulaire est incomplet : le champ \u00abDocument\u00bb est obligatoire.", + 'result': 'ok', + 'formulaire_ok': 0, + 'message': 'Le formulaire est incomplet : le champ \u00abDocument\u00bb est obligatoire.', }, status=200, ) diff --git a/tests/test_airquality.py b/tests/test_airquality.py index 9c9f2dba..6daf85e8 100644 --- a/tests/test_airquality.py +++ b/tests/test_airquality.py @@ -6,50 +6,50 @@ import tests.utils from passerelle.apps.airquality.models import AirQuality SAMPLE_RESPONSE = { - "licence": "https://opendatacommons.org/licenses/odbl/", - "commune": "LYON-1ER-ARRONDISSEMENT", - "code_insee": "69381", - "indices": { - "current_page": 1, - "data": [ + 'licence': 'https://opendatacommons.org/licenses/odbl/', + 'commune': 'LYON-1ER-ARRONDISSEMENT', + 'code_insee': '69381', + 'indices': { + 'current_page': 1, + 'data': [ { - "date": "2020-12-22", - "valeur": "26.6503231768126", - "couleur_html": "#5CCB60", - "qualificatif": "Bon", - "type_valeur": "prévision", + 'date': '2020-12-22', + 'valeur': '26.6503231768126', + 'couleur_html': '#5CCB60', + 'qualificatif': 'Bon', + 'type_valeur': 'prévision', }, { - "date": "2020-12-21", - "valeur": "21.6876695818178", - "couleur_html": "#5CCB60", - "qualificatif": "Bon", - "type_valeur": "prévision", + 'date': '2020-12-21', + 'valeur': '21.6876695818178', + 'couleur_html': '#5CCB60', + 'qualificatif': 'Bon', + 'type_valeur': 'prévision', }, { - "date": "2020-12-20", - "valeur": "26.1405508214683", - "couleur_html": "#5CCB60", - "qualificatif": "Bon", - "type_valeur": "prévision", + 'date': '2020-12-20', + 'valeur': '26.1405508214683', + 'couleur_html': '#5CCB60', + 'qualificatif': 'Bon', + 'type_valeur': 'prévision', }, ], }, - "first_page_url": "https://api.atmo-aura.fr/communes/69381/indices?api_token=XXX&page=1", - "from": 1, - "last_page": 23, - "last_page_url": "https://api.atmo-aura.fr/communes/69381/indices?api_token=XXX&page=23", - "next_page_url": "https://api.atmo-aura.fr/communes/69381/indices?api_token=XXX&page=2", - "path": "https://api.atmo-aura.fr/communes/69381/indices", - "per_page": 50, - "prev_page_url": None, - 
"to": 50, - "total": 1137, + 'first_page_url': 'https://api.atmo-aura.fr/communes/69381/indices?api_token=XXX&page=1', + 'from': 1, + 'last_page': 23, + 'last_page_url': 'https://api.atmo-aura.fr/communes/69381/indices?api_token=XXX&page=23', + 'next_page_url': 'https://api.atmo-aura.fr/communes/69381/indices?api_token=XXX&page=2', + 'path': 'https://api.atmo-aura.fr/communes/69381/indices', + 'per_page': 50, + 'prev_page_url': None, + 'to': 50, + 'total': 1137, } SAMPLE_COMMENT_RESPONSE = { - "licence": "https://opendatacommons.org/licenses/odbl/", - "commentaire": "Jeudi 11 mai, le temps perturbé a permis d’avoir une bonne qualité de l’air sur la zone de surveillance.", + 'licence': 'https://opendatacommons.org/licenses/odbl/', + 'commentaire': 'Jeudi 11 mai, le temps perturbé a permis d’avoir une bonne qualité de l’air sur la zone de surveillance.', } diff --git a/tests/test_api_entreprise.py b/tests/test_api_entreprise.py index d2dcb4e4..c66f17e2 100644 --- a/tests/test_api_entreprise.py +++ b/tests/test_api_entreprise.py @@ -25,201 +25,201 @@ from passerelle.apps.api_entreprise.models import APIEntreprise from tests.utils import FakedResponse, make_resource ETABLISSEMENTS_RESPONSE = { - "data": { - "siege_social": True, - "siret": "41816609600051", - "date_mise_a_jour": 1449183600, - "tranche_effectif_salarie": { - "de": 200, - "a": 249, - "code": "31", - "date_reference": "2014", - "intitule": "200 à 249 salariés", + 'data': { + 'siege_social': True, + 'siret': '41816609600051', + 'date_mise_a_jour': 1449183600, + 'tranche_effectif_salarie': { + 'de': 200, + 'a': 249, + 'code': '31', + 'date_reference': '2014', + 'intitule': '200 à 249 salariés', }, - "date_creation": 1108594800, - "region_implantation": {"code": "11", "value": "Île-de-France"}, - "adresse": { - "acheminement_postal": { - "l1": "OCTO TECHNOLOGY", - "l2": None, - "l3": None, - "l4": "50 AVENUE DES CHAMPS ELYSEES", - "l5": None, - "l6": "75008 PARIS", - "l7": "FRANCE", + 'date_creation': 1108594800, + 'region_implantation': {'code': '11', 'value': 'Île-de-France'}, + 'adresse': { + 'acheminement_postal': { + 'l1': 'OCTO TECHNOLOGY', + 'l2': None, + 'l3': None, + 'l4': '50 AVENUE DES CHAMPS ELYSEES', + 'l5': None, + 'l6': '75008 PARIS', + 'l7': 'FRANCE', }, - "numero_voie": "50", - "type_voie": "AV", - "libelle_voie": "DES CHAMPS ELYSEES", - "code_postal": "75008", - "libelle_commune": "PARIS 8", - "code_commune": "75108", + 'numero_voie': '50', + 'type_voie': 'AV', + 'libelle_voie': 'DES CHAMPS ELYSEES', + 'code_postal': '75008', + 'libelle_commune': 'PARIS 8', + 'code_commune': '75108', }, - "etat_administratif": {"value": "F", "date_fermeture": 1315173600}, - "activite_principale": { - "code": "62.02A", - "libelle": "Conseil en systèmes et logiciels informatiques", + 'etat_administratif': {'value': 'F', 'date_fermeture': 1315173600}, + 'activite_principale': { + 'code': '62.02A', + 'libelle': 'Conseil en systèmes et logiciels informatiques', }, }, - "gateway_error": False, + 'gateway_error': False, } UNITES_LEGALES_RESPONSE = { - "data": { - "siren": "418166096", - "siret_siege_social": "41816609600051", - "numero_tva_intracommunautaire": "FR16418166096", - "forme_juridique": { - "libelle": "SA à directoire (s.a.i.)", - "code": "5699", + 'data': { + 'siren': '418166096', + 'siret_siege_social': '41816609600051', + 'numero_tva_intracommunautaire': 'FR16418166096', + 'forme_juridique': { + 'libelle': 'SA à directoire (s.a.i.)', + 'code': '5699', }, - "activite_principale": { - "code": "62.02A", - "libelle": "Conseil en 
systèmes et logiciels informatiques", + 'activite_principale': { + 'code': '62.02A', + 'libelle': 'Conseil en systèmes et logiciels informatiques', }, - "personne_morale_attributs": { - "raison_sociale": "OCTO-TECHNOLOGY", + 'personne_morale_attributs': { + 'raison_sociale': 'OCTO-TECHNOLOGY', }, - "tranche_effectif_salarie": { - "de": 200, - "a": 249, - "code": "31", - "date_reference": "2014", - "intitule": "200 à 249 salariés", + 'tranche_effectif_salarie': { + 'de': 200, + 'a': 249, + 'code': '31', + 'date_reference': '2014', + 'intitule': '200 à 249 salariés', }, - "date_creation": 891381600, - "categorie_entreprise": "PME", - "diffusable_commercialement": True, + 'date_creation': 891381600, + 'categorie_entreprise': 'PME', + 'diffusable_commercialement': True, } } UNITES_LEGALES_SIEGE_RESPONSE = { - "data": { - "siret": "41816609600051", - "adresse": { - "numero_voie": "50", - "type_voie": "AV", - "libelle_voie": "DES CHAMPS ELYSEES", - "code_postal": "75008", - "libelle_commune": "PARIS 8", - "code_commune": "75108", - "acheminement_postal": { - "l1": "OCTO TECHNOLOGY", - "l4": "50 AVENUE DES CHAMPS ELYSEES", - "l6": "75008 PARIS", - "l7": "FRANCE", + 'data': { + 'siret': '41816609600051', + 'adresse': { + 'numero_voie': '50', + 'type_voie': 'AV', + 'libelle_voie': 'DES CHAMPS ELYSEES', + 'code_postal': '75008', + 'libelle_commune': 'PARIS 8', + 'code_commune': '75108', + 'acheminement_postal': { + 'l1': 'OCTO TECHNOLOGY', + 'l4': '50 AVENUE DES CHAMPS ELYSEES', + 'l6': '75008 PARIS', + 'l7': 'FRANCE', }, }, }, - "meta": { - "date_derniere_mise_a_jour": 1449183600, + 'meta': { + 'date_derniere_mise_a_jour': 1449183600, }, } RCS_UNITES_LEGALES_MANDATAIRES_SOCIAUX_RESPONSE = { - "data": [ + 'data': [ { - "data": { - "nom": "HISQUIN", - "prenom": "FRANCOIS CARLOS PIOTR", - "fonction": "PRESIDENT DU DIRECTOIRE", - "dirigeant": True, - "date_naissance": "1965-01-27", - "raison_sociale": "", - "identifiant": "", - "type": "PP", + 'data': { + 'nom': 'HISQUIN', + 'prenom': 'FRANCOIS CARLOS PIOTR', + 'fonction': 'PRESIDENT DU DIRECTOIRE', + 'dirigeant': True, + 'date_naissance': '1965-01-27', + 'raison_sociale': '', + 'identifiant': '', + 'type': 'PP', }, }, { - "data": { - "nom": "MICHU", - "prenom": "ETIENNE,ALI,CLAY", - "fonction": "FIGURANT LDIF", - "dirigeant": True, - "date_naissance": "1969-08-15", - "date_naissance_timestamp": 0, - "raison_sociale": "BCRH & ASSOCIES - SOCIETE A RESPONSABILITE LIMITEE A ASSOCIE UNIQUE", - "identifiant": "490092574", - "type": "PP", + 'data': { + 'nom': 'MICHU', + 'prenom': 'ETIENNE,ALI,CLAY', + 'fonction': 'FIGURANT LDIF', + 'dirigeant': True, + 'date_naissance': '1969-08-15', + 'date_naissance_timestamp': 0, + 'raison_sociale': 'BCRH & ASSOCIES - SOCIETE A RESPONSABILITE LIMITEE A ASSOCIE UNIQUE', + 'identifiant': '490092574', + 'type': 'PP', }, }, { - "data": { - "fonction": "COMMISSAIRE AUX COMPTES SUPPLEANT", - "dirigeant": True, - "date_naissance": "", - "date_naissance_timestamp": 0, - "raison_sociale": "BCRH & ASSOCIES - SOCIETE A RESPONSABILITE LIMITEE A ASSOCIE UNIQUE", - "identifiant": "490092574", - "type": "PM", + 'data': { + 'fonction': 'COMMISSAIRE AUX COMPTES SUPPLEANT', + 'dirigeant': True, + 'date_naissance': '', + 'date_naissance_timestamp': 0, + 'raison_sociale': 'BCRH & ASSOCIES - SOCIETE A RESPONSABILITE LIMITEE A ASSOCIE UNIQUE', + 'identifiant': '490092574', + 'type': 'PM', }, }, ] } EXTRAITS_RCS_RESPONSE = { - "data": { - "siren": "418166096", - "date_immatriculation": "1998-03-27", - "date_immatriculation_timestamp": 
890953200, - "date_extrait": "21 AVRIL 2017", - "observations": [ + 'data': { + 'siren': '418166096', + 'date_immatriculation': '1998-03-27', + 'date_immatriculation_timestamp': 890953200, + 'date_extrait': '21 AVRIL 2017', + 'observations': [ { - "date": "2000-02-23", - "date_timestamp": 951260400, - "numero": "12197", - "libelle": " LA SOCIETE NE CONSERVE AUCUNE ACTIVITE A SON ANCIEN SIEGE ", + 'date': '2000-02-23', + 'date_timestamp': 951260400, + 'numero': '12197', + 'libelle': ' LA SOCIETE NE CONSERVE AUCUNE ACTIVITE A SON ANCIEN SIEGE ', } ], } } ASSOCIATIONS_RESPONSE = { - "data": { - "rna_id": "W751135389", - "titre": "ALLIANCE DU COEUR: UNION NATIONALE DES FEDERATIONS ET ASSOCIATIONS DE MALADES CARDIOVASCULAIRES", - "objet": "information, soutien, solidarité et accompagnement psycho médico social des personnes malades cardiovasculaires et de leurs proches...", - "siret": "42135938100025", - "siret_siege_social": "42135938100033", - "date_creation": "1993-02-11", - "date_declaration": "2013-06-28", - "date_publication": "1993-03-03", - "adresse_siege": { - "numero_voie": "10", - "type_voie": "RUE", - "libelle_voie": "Lebouis", - "code_insee": "75120", - "code_postal": ["75014"], - "commune": "Paris", + 'data': { + 'rna_id': 'W751135389', + 'titre': 'ALLIANCE DU COEUR: UNION NATIONALE DES FEDERATIONS ET ASSOCIATIONS DE MALADES CARDIOVASCULAIRES', + 'objet': 'information, soutien, solidarité et accompagnement psycho médico social des personnes malades cardiovasculaires et de leurs proches...', + 'siret': '42135938100025', + 'siret_siege_social': '42135938100033', + 'date_creation': '1993-02-11', + 'date_declaration': '2013-06-28', + 'date_publication': '1993-03-03', + 'adresse_siege': { + 'numero_voie': '10', + 'type_voie': 'RUE', + 'libelle_voie': 'Lebouis', + 'code_insee': '75120', + 'code_postal': ['75014'], + 'commune': 'Paris', }, - "groupement": "Simple", - "mise_a_jour": "2013-06-28", + 'groupement': 'Simple', + 'mise_a_jour': '2013-06-28', } } DOCUMENTS_ASSOCIATION_RESPONSE = { - "meta": {"nombre_documents": 2, "nombre_documents_deficients": 0}, - "data": [ + 'meta': {'nombre_documents': 2, 'nombre_documents_deficients': 0}, + 'data': [ { - "data": { - "type": "Statuts", - "url": "https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/attestation_document_association.pdf", - "timestamp": "1500660325", + 'data': { + 'type': 'Statuts', + 'url': 'https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/attestation_document_association.pdf', + 'timestamp': '1500660325', }, }, { - "data": { - "type": "Récépissé", - "url": "https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/recepisse_association.pdf", - "timestamp": "1500667325", + 'data': { + 'type': 'Récépissé', + 'url': 'https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/recepisse_association.pdf', + 'timestamp': '1500667325', }, }, { - "data": { - "timestamp": "1337158058", - "url": "https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/attestation_document_association.pdf", - "type": "Statuts", + 'data': { + 'timestamp': '1337158058', + 'url': 'https://apientreprise.fr/attestations/40ab0b07d434d0417e8997ce7c5afbef/attestation_document_association.pdf', + 'type': 'Statuts', }, }, ], @@ -227,15 +227,15 @@ DOCUMENTS_ASSOCIATION_RESPONSE = { EXERCICES_RESPONSE = { - "data": [ - {"data": {"chiffre_affaires": 900001, "date_fin_exercice": "2015-12-01"}, "links": {}, "meta": {}} + 'data': [ + {'data': {'chiffre_affaires': 900001, 
'date_fin_exercice': '2015-12-01'}, 'links': {}, 'meta': {}} ], - "meta": {}, - "links": {}, + 'meta': {}, + 'links': {}, } -DOCUMENT_ASSOCIATION_RESPONSE = "binary content" +DOCUMENT_ASSOCIATION_RESPONSE = 'binary content' REQUEST_PARAMS = {'context': 'MSP', 'object': 'demand', 'recipient': 'siret'} @@ -615,7 +615,7 @@ def test_document_association(app, resource, freezer): document['url'], params={'context': 'MSP', 'object': 'demand', 'recipient': 'siret'}, status=200 ) # try to get document with wrong signature - wrong_url = document['url'] + "wrong/" + wrong_url = document['url'] + 'wrong/' app.get(wrong_url, status=404) # try expired url @@ -662,7 +662,7 @@ def test_no_json_error(app, resource): assert response.json['err'] == 1 assert ( response.json['err_desc'] - == "API-entreprise returned non-JSON content with status 200: simple text" + == 'API-entreprise returned non-JSON content with status 200: simple text' ) diff --git a/tests/test_api_particulier.py b/tests/test_api_particulier.py index d1ccc1d2..96cf5873 100644 --- a/tests/test_api_particulier.py +++ b/tests/test_api_particulier.py @@ -27,62 +27,62 @@ from tests.test_manager import login from tests.utils import endpoint_get, make_resource SVAIR_RESPONSE = { - "declarant1": { - "nom": "Martin", - "nomNaissance": "Martin", - "prenoms": "Pierre", - "dateNaissance": "22/03/1985", + 'declarant1': { + 'nom': 'Martin', + 'nomNaissance': 'Martin', + 'prenoms': 'Pierre', + 'dateNaissance': '22/03/1985', }, - "declarant2": { - "nom": "Martin", - "nomNaissance": "Honore", - "prenoms": "Marie", - "dateNaissance": "03/04/1986", + 'declarant2': { + 'nom': 'Martin', + 'nomNaissance': 'Honore', + 'prenoms': 'Marie', + 'dateNaissance': '03/04/1986', }, - "foyerFiscal": {"annee": 2015, "adresse": "12 rue Balzac 75008 Paris"}, - "dateRecouvrement": "10/10/2015", - "dateEtablissement": "08/07/2015", - "nombreParts": 2, - "situationFamille": "Marié(e)s", - "nombrePersonnesCharge": 2, - "revenuBrutGlobal": 29880, - "revenuImposable": 29880, - "impotRevenuNetAvantCorrections": 2165, - "montantImpot": 2165, - "revenuFiscalReference": 29880, - "anneeImpots": "2015", - "anneeRevenus": "2014", + 'foyerFiscal': {'annee': 2015, 'adresse': '12 rue Balzac 75008 Paris'}, + 'dateRecouvrement': '10/10/2015', + 'dateEtablissement': '08/07/2015', + 'nombreParts': 2, + 'situationFamille': 'Marié(e)s', + 'nombrePersonnesCharge': 2, + 'revenuBrutGlobal': 29880, + 'revenuImposable': 29880, + 'impotRevenuNetAvantCorrections': 2165, + 'montantImpot': 2165, + 'revenuFiscalReference': 29880, + 'anneeImpots': '2015', + 'anneeRevenus': '2014', } CAF_FAMILLE = { - "adresse": { - "codePostalVille": "12345 CONDAT", - "complementIdentiteGeo": "ESCALIER B", - "identite": "Madame MARIE DUPONT", - "numeroRue": "123 RUE BIDON", - "pays": "FRANCE", + 'adresse': { + 'codePostalVille': '12345 CONDAT', + 'complementIdentiteGeo': 'ESCALIER B', + 'identite': 'Madame MARIE DUPONT', + 'numeroRue': '123 RUE BIDON', + 'pays': 'FRANCE', }, - "allocataires": [ - {"dateDeNaissance": "12111971", "nomPrenom": "MARIE DUPONT", "sexe": "F"}, - {"dateDeNaissance": "18101969", "nomPrenom": "JEAN DUPONT", "sexe": "M"}, + 'allocataires': [ + {'dateDeNaissance': '12111971', 'nomPrenom': 'MARIE DUPONT', 'sexe': 'F'}, + {'dateDeNaissance': '18101969', 'nomPrenom': 'JEAN DUPONT', 'sexe': 'M'}, ], - "annee": 2017, - "enfants": [{"dateDeNaissance": "11122016", "nomPrenom": "LUCIE DUPONT", "sexe": "F"}], - "mois": 4, - "quotientFamilial": 1754, + 'annee': 2017, + 'enfants': [{'dateDeNaissance': '11122016', 
'nomPrenom': 'LUCIE DUPONT', 'sexe': 'F'}], + 'mois': 4, + 'quotientFamilial': 1754, } INTROSPECT = { - "_id": "1d99db5a-a099-4314-ad2f-2707c6b505a6", - "name": "Application de sandbox", - "scopes": [ - "dgfip_avis_imposition", - "dgfip_adresse", - "cnaf_allocataires", - "cnaf_enfants", - "cnaf_adresse", - "cnaf_quotient_familial", - "mesri_statut_etudiant", + '_id': '1d99db5a-a099-4314-ad2f-2707c6b505a6', + 'name': 'Application de sandbox', + 'scopes': [ + 'dgfip_avis_imposition', + 'dgfip_adresse', + 'cnaf_allocataires', + 'cnaf_enfants', + 'cnaf_adresse', + 'cnaf_quotient_familial', + 'mesri_statut_etudiant', ], } @@ -436,13 +436,13 @@ def test_scopes(app, resource, mock_api_particulier): assert not resource.accessible_scopes resp = endpoint_get('/api-particulier/test/scopes', app, resource, 'scopes') assert resp.json['data'] == [ - "cnaf_adresse", - "cnaf_allocataires", - "cnaf_enfants", - "cnaf_quotient_familial", - "dgfip_adresse", - "dgfip_avis_imposition", - "mesri_statut_etudiant", + 'cnaf_adresse', + 'cnaf_allocataires', + 'cnaf_enfants', + 'cnaf_quotient_familial', + 'dgfip_adresse', + 'dgfip_avis_imposition', + 'mesri_statut_etudiant', ] assert len(APIParticulier.objects.get(slug=resource.slug).accessible_scopes) == 7 diff --git a/tests/test_arcgis.py b/tests/test_arcgis.py index 05ca6742..aea8e428 100644 --- a/tests/test_arcgis.py +++ b/tests/test_arcgis.py @@ -157,9 +157,9 @@ TOKEN = '''{ }''' ERRORS = [ - ({"error": {"message": "crash message"}}, 'crash message'), - ({"error": {"foo": "bar"}}, 'unknown ArcGIS/token error'), - ({"error": "string crash"}, 'unknown ArcGIS/token error'), + ({'error': {'message': 'crash message'}}, 'crash message'), + ({'error': {'foo': 'bar'}}, 'unknown ArcGIS/token error'), + ({'error': 'string crash'}, 'unknown ArcGIS/token error'), ] @@ -658,7 +658,7 @@ def test_arcgis_with_token_query(app, arcgis): tests.utils.FakedResponse(content=INFO, status_code=200), ] requests_post.side_effect = [ - tests.utils.FakedResponse(content="{}", status_code=200), # no token + tests.utils.FakedResponse(content='{}', status_code=200), # no token ] resp = app.get(endpoint, params=params, status=200) assert requests_get.call_count == 1 @@ -739,12 +739,12 @@ def query(arcgis): def test_query_q_method(arcgis, query, rf): arcgis_response = { - "features": [ + 'features': [ { - "attributes": { - "ident": "1234", - "address": "rue du calvaire", - "codepost": 13200, + 'attributes': { + 'ident': '1234', + 'address': 'rue du calvaire', + 'codepost': 13200, }, 'geo': {}, }, @@ -756,15 +756,15 @@ def test_query_q_method(arcgis, query, rf): content=json.dumps(arcgis_response), status_code=200 ) assert query.q(rf.get('/', data={'adress': "AVENUE D'ANNAM"}), full=True) == { - "data": [ + 'data': [ { - "attributes": {"ident": "1234", "address": "rue du calvaire", "codepost": 13200}, - "geo": {}, - "id": "1234", - "text": "rue du calvaire - 13200", + 'attributes': {'ident': '1234', 'address': 'rue du calvaire', 'codepost': 13200}, + 'geo': {}, + 'id': '1234', + 'text': 'rue du calvaire - 13200', } ], - "metadata": {"meta": {}}, + 'metadata': {'meta': {}}, } assert requests_get.call_count == 1 assert ( diff --git a/tests/test_arpege_ecp.py b/tests/test_arpege_ecp.py index 14d54411..a8101647 100644 --- a/tests/test_arpege_ecp.py +++ b/tests/test_arpege_ecp.py @@ -94,12 +94,12 @@ def test_get_access_token(connector): assert ' 404 ' in str(error.value) - with tests.utils.mock_url(response="content", status_code=200): + with tests.utils.mock_url(response='content', status_code=200): 
with pytest.raises(APIError) as error: token = connector.get_access_token('nameid') assert 'no JSON content' in str(error.value) - with tests.utils.mock_url(response="content", status_code=200): + with tests.utils.mock_url(response='content', status_code=200): with pytest.raises(APIError) as error: token = connector.get_access_token('nameid') assert 'no JSON content' in str(error.value) diff --git a/tests/test_astech.py b/tests/test_astech.py index ec569330..f547b0c7 100644 --- a/tests/test_astech.py +++ b/tests/test_astech.py @@ -79,69 +79,69 @@ POSITIONS_RESPONSE = """ """ -@mock.patch("passerelle.utils.Request.request") +@mock.patch('passerelle.utils.Request.request') def test_connections(mocked_request, app, setup): mocked_request.return_value = tests.utils.FakedResponse(content=CONNECTION_RESPONSE, status_code=200) endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "connections"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'connections'}, ) response = app.get(endpoint) assert mocked_request.call_args[0][0] == 'get' - assert mocked_request.call_args[0][1].endswith("connection/all") + assert mocked_request.call_args[0][1].endswith('connection/all') assert mocked_request.call_count == 1 - assert response.json["err"] == 0 - assert response.json["data"]["default"] == "TEST" + assert response.json['err'] == 0 + assert response.json['data']['default'] == 'TEST' - ASTech.objects.update(connection="OTHER") + ASTech.objects.update(connection='OTHER') response = app.get(endpoint) - assert response.json["err"] == 1 + assert response.json['err'] == 1 - ASTech.objects.update(connection="TEST") + ASTech.objects.update(connection='TEST') response = app.get(endpoint) - assert response.json["err"] == 0 - assert response.json["data"]["default"] == "TEST" + assert response.json['err'] == 0 + assert response.json['data']['default'] == 'TEST' # bad response mocked_request.return_value = tests.utils.FakedResponse( - content='{"msg": "not found"}', status_code=404, reason="Not Found" + content='{"msg": "not found"}', status_code=404, reason='Not Found' ) response = app.get(endpoint) - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "AS-TECH response: 404 Not Found" - assert response.json["data"]["error"]["status"] == 404 - assert response.json["data"]["error"]["content"]["msg"] == "not found" + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'AS-TECH response: 404 Not Found' + assert response.json['data']['error']['status'] == 404 + assert response.json['data']['error']['content']['msg'] == 'not found' mocked_request.return_value = tests.utils.FakedResponse( - content="crash", status_code=500, reason="Crashhhh" + content='crash', status_code=500, reason='Crashhhh' ) response = app.get(endpoint) - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "AS-TECH response: 500 Crashhhh" - assert response.json["data"]["error"]["status"] == 500 - assert response.json["data"]["error"]["content"] == "crash" - mocked_request.return_value = tests.utils.FakedResponse(content="not json", status_code=200, reason="OK") + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'AS-TECH response: 500 Crashhhh' + assert 
response.json['data']['error']['status'] == 500 + assert response.json['data']['error']['content'] == 'crash' + mocked_request.return_value = tests.utils.FakedResponse(content='not json', status_code=200, reason='OK') response = app.get(endpoint) - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"].startswith("invalid JSON in response:") + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'].startswith('invalid JSON in response:') mocked_request.side_effect = ConnectionError('mocked error', request=Request()) response = app.get(endpoint) - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == 'connection error: mocked error' + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'connection error: mocked error' -@mock.patch("passerelle.utils.Request.request") +@mock.patch('passerelle.utils.Request.request') def test_authorization(mocked_request, app, setup): mocked_request.side_effect = [ tests.utils.FakedResponse(content=CONNECTION_RESPONSE, status_code=200), tests.utils.FakedResponse(content=AUTH_RESPONSE, status_code=200), ] endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "authorization"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'authorization'}, ) response = app.get(endpoint) assert mocked_request.call_count == 2 @@ -158,13 +158,13 @@ def test_authorization(mocked_request, app, setup): assert mocked_request.call_count == 2 -@mock.patch("passerelle.utils.Request.request") -@mock.patch("passerelle.apps.astech.models.ASTech.get_authorization") +@mock.patch('passerelle.utils.Request.request') +@mock.patch('passerelle.apps.astech.models.ASTech.get_authorization') def test_services(mocked_auth, mocked_request, app, setup): mocked_auth.return_value = {'access_token': '4242', 'connection_id': 'TEST'} endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "services"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'services'}, ) mocked_request.return_value = tests.utils.FakedResponse(content=SERVICES_RESPONSE, status_code=200) @@ -176,19 +176,19 @@ def test_services(mocked_auth, mocked_request, app, setup): assert mocked_request.call_args[1]['params']['connection_id'] == 'TEST' assert response.json['err'] == 0 assert response.json['data'] == [ - {"id": "123", "text": "123 / FOOBAR"}, - {"id": "ABC", "text": "ABC / AH BE CE"}, - {"id": "XFO", "text": "XFO / BARFOO"}, + {'id': '123', 'text': '123 / FOOBAR'}, + {'id': 'ABC', 'text': 'ABC / AH BE CE'}, + {'id': 'XFO', 'text': 'XFO / BARFOO'}, ] -@mock.patch("passerelle.utils.Request.request") -@mock.patch("passerelle.apps.astech.models.ASTech.get_authorization") +@mock.patch('passerelle.utils.Request.request') +@mock.patch('passerelle.apps.astech.models.ASTech.get_authorization') def test_companies(mocked_auth, mocked_request, app, setup): mocked_auth.return_value = {'access_token': '4242', 'connection_id': 'TEST'} endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "companies"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'companies'}, ) mocked_request.return_value = 
tests.utils.FakedResponse(content=COMPANIES_RESPONSE, status_code=200) @@ -199,24 +199,24 @@ def test_companies(mocked_auth, mocked_request, app, setup): assert mocked_request.call_args[1]['json']['codeDemandeur'] == 'ENTROUV' assert response.json['err'] == 0 assert response.json['data'] == [ - {"id": "01", "text": "01 / SERVICES TECHNIQUES"}, - {"id": "10", "text": "10 / DIRECTION BATIMENT"}, - {"id": "11", "text": "11 / PLOMBERIE"}, + {'id': '01', 'text': '01 / SERVICES TECHNIQUES'}, + {'id': '10', 'text': '10 / DIRECTION BATIMENT'}, + {'id': '11', 'text': '11 / PLOMBERIE'}, ] - mocked_request.return_value = tests.utils.FakedResponse(content="[]", status_code=200) + mocked_request.return_value = tests.utils.FakedResponse(content='[]', status_code=200) response = app.get(endpoint) assert response.json['err'] == 1 assert response.json['err_desc'] == 'Invalid response: []' -@mock.patch("passerelle.utils.Request.request") -@mock.patch("passerelle.apps.astech.models.ASTech.get_authorization") +@mock.patch('passerelle.utils.Request.request') +@mock.patch('passerelle.apps.astech.models.ASTech.get_authorization') def test_labels(mocked_auth, mocked_request, app, setup): mocked_auth.return_value = {'access_token': '4242', 'connection_id': 'TEST'} endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "labels"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'labels'}, ) mocked_request.side_effect = [ @@ -232,10 +232,10 @@ def test_labels(mocked_auth, mocked_request, app, setup): assert mocked_request.call_args_list[1][1]['json'] == {'societeDemandeur': '99'} assert response.json['err'] == 0 assert response.json['data'] == [ - {"id": "1", "text": "1 / CHANGEMENT AMPOULE"}, - {"id": "2", "text": "2 / FUITE"}, - {"id": "3", "text": "3 / SERRURE CASSEE"}, - {"id": "4", "text": "4 / WC BOUCHE"}, + {'id': '1', 'text': '1 / CHANGEMENT AMPOULE'}, + {'id': '2', 'text': '2 / FUITE'}, + {'id': '3', 'text': '3 / SERRURE CASSEE'}, + {'id': '4', 'text': '4 / WC BOUCHE'}, ] mocked_request.reset_mock(side_effect=True) @@ -248,19 +248,19 @@ def test_labels(mocked_auth, mocked_request, app, setup): assert response.json['err'] == 0 assert len(response.json['data']) == 4 - mocked_request.return_value = tests.utils.FakedResponse(content="[]", status_code=200) + mocked_request.return_value = tests.utils.FakedResponse(content='[]', status_code=200) response = app.get(endpoint) assert response.json['err'] == 1 assert response.json['err_desc'] == 'Invalid response: []' -@mock.patch("passerelle.utils.Request.request") -@mock.patch("passerelle.apps.astech.models.ASTech.get_authorization") +@mock.patch('passerelle.utils.Request.request') +@mock.patch('passerelle.apps.astech.models.ASTech.get_authorization') def test_parameter(mocked_auth, mocked_request, app, setup): mocked_auth.return_value = {'access_token': '4242', 'connection_id': 'TEST'} endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "parameter"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'parameter'}, ) mocked_request.side_effect = [ @@ -288,13 +288,13 @@ def test_parameter(mocked_auth, mocked_request, app, setup): response = app.get(endpoint, status=400) -@mock.patch("passerelle.utils.Request.request") -@mock.patch("passerelle.apps.astech.models.ASTech.get_authorization") +@mock.patch('passerelle.utils.Request.request') 
+@mock.patch('passerelle.apps.astech.models.ASTech.get_authorization') def test_create_demand(mocked_auth, mocked_request, app, setup): mocked_auth.return_value = {'access_token': '4242', 'connection_id': 'TEST'} endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "create-demand"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'create-demand'}, ) mocked_request.return_value = tests.utils.FakedResponse(content=CREATE_DEMAND_RESPONSE, status_code=201) @@ -357,8 +357,8 @@ def test_create_demand(mocked_auth, mocked_request, app, setup): # add a document mocked_request.reset_mock() endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "add-document"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'add-document'}, ) mocked_request.return_value = tests.utils.FakedResponse(content=ADD_DOCUMENT_RESPONSE, status_code=201) document = { @@ -378,18 +378,18 @@ def test_create_demand(mocked_auth, mocked_request, app, setup): assert mocked_request.call_args[1]['params']['docFile'] == 'test.txt' assert mocked_request.call_args[1]['files'] == {'file0': ('test.txt', b'foo\n', 'text/plain')} assert response.json['err'] == 0 - assert response.json['data'] == "" + assert response.json['data'] == '' -@mock.patch("passerelle.utils.Request.request") -@mock.patch("passerelle.apps.astech.models.ASTech.get_authorization") +@mock.patch('passerelle.utils.Request.request') +@mock.patch('passerelle.apps.astech.models.ASTech.get_authorization') def test_positions(mocked_auth, mocked_request, app, setup): mocked_auth.return_value = {'access_token': '4242', 'connection_id': 'TEST'} # position of a demand endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "demand-position"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'demand-position'}, ) mocked_request.return_value = tests.utils.FakedResponse(content=POSITION_RESPONSE, status_code=200) response = app.get(endpoint + '?demand_id=000000000001234', status=200) @@ -398,11 +398,11 @@ def test_positions(mocked_auth, mocked_request, app, setup): assert mocked_request.call_args[0][1].endswith('apicli/demande/position/000000000001234') assert response.json['err'] == 0 assert response.json['data'] == { - "position": "E", - "positionLib": "Envoi atelier", - "info": None, - "id": "E", - "text": "Envoi atelier", + 'position': 'E', + 'positionLib': 'Envoi atelier', + 'info': None, + 'id': 'E', + 'text': 'Envoi atelier', } # invalid AS-TECH response @@ -418,8 +418,8 @@ def test_positions(mocked_auth, mocked_request, app, setup): # get all possible positions mocked_request.reset_mock() endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "astech", "slug": setup.slug, "endpoint": "demand-all-positions"}, + 'generic-endpoint', + kwargs={'connector': 'astech', 'slug': setup.slug, 'endpoint': 'demand-all-positions'}, ) mocked_request.return_value = tests.utils.FakedResponse(content=POSITIONS_RESPONSE, status_code=200) response = app.get(endpoint, status=200) @@ -429,9 +429,9 @@ def test_positions(mocked_auth, mocked_request, app, setup): assert response.json['err'] == 0 assert len(response.json['data']) == 11 assert response.json['data'][0] == { - "position": "A", - "positionLib": "En attente", - "color": "0, 0, 0", - "id": "A", - "text": "En attente", + 'position': 'A', + 'positionLib': 'En 
attente', + 'color': '0, 0, 0', + 'id': 'A', + 'text': 'En attente', } diff --git a/tests/test_astre_rest.py b/tests/test_astre_rest.py index 9593a2ba..ea74932b 100644 --- a/tests/test_astre_rest.py +++ b/tests/test_astre_rest.py @@ -196,8 +196,8 @@ def test_gf_documents_gedmanager_document_create(app, connector): assert len(responses.calls) == 1 made_request = responses.calls[0].request assert made_request.headers['content-type'].startswith('multipart/form-data') - _, pdict = cgi.parse_header(made_request.headers["content-type"]) - pdict["boundary"] = bytes(pdict["boundary"], "utf-8") + _, pdict = cgi.parse_header(made_request.headers['content-type']) + pdict['boundary'] = bytes(pdict['boundary'], 'utf-8') pdict['CONTENT-LENGTH'] = made_request.headers['Content-Length'] postvars = cgi.parse_multipart(io.BytesIO(made_request.body), pdict) assert postvars['nomFichier'] == ['somefile.ext'] @@ -306,8 +306,8 @@ def test_gf_documents_gedmanager_document_update(app, connector): assert len(responses.calls) == 1 made_request = responses.calls[0].request assert made_request.headers['content-type'].startswith('multipart/form-data') - _, pdict = cgi.parse_header(made_request.headers["content-type"]) - pdict["boundary"] = bytes(pdict["boundary"], "utf-8") + _, pdict = cgi.parse_header(made_request.headers['content-type']) + pdict['boundary'] = bytes(pdict['boundary'], 'utf-8') pdict['CONTENT-LENGTH'] = made_request.headers['Content-Length'] postvars = cgi.parse_multipart(io.BytesIO(made_request.body), pdict) assert postvars['nomFichier'] == ['somefile.ext'] diff --git a/tests/test_base_adresse.py b/tests/test_base_adresse.py index 61eda666..1cd163d7 100644 --- a/tests/test_base_adresse.py +++ b/tests/test_base_adresse.py @@ -23,29 +23,29 @@ from passerelle.apps.base_adresse.models import ( FAKED_CONTENT = json.dumps( { - "limit": 1, - "attribution": "BAN", - "version": "draft", - "licence": "ODbL 1.0", - "query": "plop", - "type": "FeatureCollection", - "features": [ + 'limit': 1, + 'attribution': 'BAN', + 'version': 'draft', + 'licence': 'ODbL 1.0', + 'query': 'plop', + 'type': 'FeatureCollection', + 'features': [ { - "geometry": {"type": "Point", "coordinates": [-0.593775, 47.474633]}, - "properties": { - "citycode": "49007", - "name": "Rue Roger Halope", - "id": "49007_6950_be54bd", - "city": "Angers", - "context": "49, Maine-et-Loire, Pays de la Loire", - "score": 0.14097272727272728, - "label": "Rue Roger Halope 49000 Angers", - "postcode": "49000", - "type": "street", - "info1": "xxx", - "info2": "yyy", + 'geometry': {'type': 'Point', 'coordinates': [-0.593775, 47.474633]}, + 'properties': { + 'citycode': '49007', + 'name': 'Rue Roger Halope', + 'id': '49007_6950_be54bd', + 'city': 'Angers', + 'context': '49, Maine-et-Loire, Pays de la Loire', + 'score': 0.14097272727272728, + 'label': 'Rue Roger Halope 49000 Angers', + 'postcode': '49000', + 'type': 'street', + 'info1': 'xxx', + 'info2': 'yyy', }, - "type": "Feature", + 'type': 'Feature', } ], } @@ -55,34 +55,34 @@ FAKE_DATA = '' FAKE_API_GEO_LIST = [ { - "code": "75056", - "codeDepartement": "75", - "codeRegion": "11", - "codesPostaux": [ - "75001", - "75002", + 'code': '75056', + 'codeDepartement': '75', + 'codeRegion': '11', + 'codesPostaux': [ + '75001', + '75002', ], - "nom": "Paris", - "population": 2190327, + 'nom': 'Paris', + 'population': 2190327, }, - {"code": "97501", "codesPostaux": ["97500"], "nom": "Miquelon-Langlade", "population": 596}, + {'code': '97501', 'codesPostaux': ['97500'], 'nom': 'Miquelon-Langlade', 'population': 
596}, ] FAKE_API_GEO = json.dumps(FAKE_API_GEO_LIST) FAKE_API_GEO_DEPARTMENTS = json.dumps( [ - {"code": "75", "codeRegion": "11", "nom": "Paris"}, + {'code': '75', 'codeRegion': '11', 'nom': 'Paris'}, { - "code": "58", - "codeRegion": "27", - "nom": "Nièvre", + 'code': '58', + 'codeRegion': '27', + 'nom': 'Nièvre', }, ] ) FAKE_API_GEO_REGIONS = json.dumps( - [{"code": "11", "nom": "Île-de-France"}, {"code": "27", "nom": "Bourgogne-Franche-Comté"}] + [{'code': '11', 'nom': 'Île-de-France'}, {'code': '27', 'nom': 'Bourgogne-Franche-Comté'}] ) @@ -312,7 +312,7 @@ def test_base_adresse_search_qs_parameters_error(app, base_adresse, mock_api_adr @mock.patch('passerelle.utils.Request.get') def test_base_adresse_search_api_error(mocked_get, app, base_adresse): def raise_for_status(): - raise HTTPError("400 Client Error: Bad Request for url: xxx") + raise HTTPError('400 Client Error: Bad Request for url: xxx') response = tests.utils.FakedResponse(content=json.dumps({'title': 'error'}), status_code=400) response.raise_for_status = raise_for_status @@ -350,19 +350,19 @@ def test_base_adresse_reverse_having_several(mocked_get, app, base_adresse): content = json.loads(FAKED_CONTENT) content['features'].append( { - "geometry": {"type": "Point", "coordinates": [-0.593775, 47.474633]}, - "properties": { - "citycode": "49007", - "name": "Rue Eugène Bardon", - "id": "49007_6950_aaaaa", - "city": "Angers", - "context": "49, Maine-et-Loire, Pays de la Loire", - "score": 0.2, - "label": "Rue Eugène Bardon 49000 Angers", - "postcode": "49000", - "type": "street", + 'geometry': {'type': 'Point', 'coordinates': [-0.593775, 47.474633]}, + 'properties': { + 'citycode': '49007', + 'name': 'Rue Eugène Bardon', + 'id': '49007_6950_aaaaa', + 'city': 'Angers', + 'context': '49, Maine-et-Loire, Pays de la Loire', + 'score': 0.2, + 'label': 'Rue Eugène Bardon 49000 Angers', + 'postcode': '49000', + 'type': 'street', }, - "type": "Feature", + 'type': 'Feature', } ) faked_content = json.dumps(content) diff --git a/tests/test_caluire_axel.py b/tests/test_caluire_axel.py index 68c33de0..3cef3459 100644 --- a/tests/test_caluire_axel.py +++ b/tests/test_caluire_axel.py @@ -250,7 +250,7 @@ def test_operation_status_error(resource): def test_link_endpoint_nameid_empty(app, resource, link_params): resp = app.post_json('/caluire-axel/test/link?NameID=', params=link_params, status=400) - assert resp.json['err_desc'] == "NameID is empty" + assert resp.json['err_desc'] == 'NameID is empty' assert resp.json['err'] == 'bad-request' @@ -258,7 +258,7 @@ def test_link_endpoint_axel_error(app, resource, link_params): with mock.patch('passerelle.contrib.caluire_axel.schemas.find_individus') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/caluire-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' assert resp.json['data'] == {'xml_request': None, 'xml_response': None} @@ -280,7 +280,7 @@ def test_link_endpoint_axel_error(app, resource, link_params): with mock.patch('xmlschema.XMLSchema.validate') as xml_validate: xml_validate.side_effect = xmlschema.XMLSchemaValidationError(None, None) resp = app.post_json('/caluire-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'].startswith("Axel error: invalid request") + assert resp.json['err_desc'].startswith('Axel error: invalid request') assert resp.json['err'] == 'error' assert 
resp.json['data']['xml_request'] == xml_request assert resp.json['data']['xml_response'] is None @@ -348,7 +348,7 @@ def test_link_endpoint_axel_error(app, resource, link_params): with mock.patch('passerelle.contrib.utils.axel.AxelSchema.decode') as decode: decode.side_effect = xmlschema.XMLSchemaValidationError(None, None) resp = app.post_json('/caluire-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'].startswith("Axel error: invalid response") + assert resp.json['err_desc'].startswith('Axel error: invalid response') assert resp.json['err'] == 'error' assert resp.json['data']['xml_request'] == xml_request assert resp.json['data']['xml_response'] == xml_response @@ -356,7 +356,7 @@ def test_link_endpoint_axel_error(app, resource, link_params): with mock.patch('passerelle.contrib.caluire_axel.models.CaluireAxel.soap_client') as client: client.side_effect = SOAPError('SOAP service is down') resp = app.post_json('/caluire-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "SOAP service is down" + assert resp.json['err_desc'] == 'SOAP service is down' @pytest.mark.parametrize( @@ -399,7 +399,7 @@ def test_link_endpoint_no_result(app, resource, link_params, xml_response): ) with mock_data(content, 'FindIndividus'): resp = app.post_json('/caluire-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -420,7 +420,7 @@ def test_link_endpoint_conflict(app, resource, link_params): link = Link.objects.create(resource=resource, name_id='yyy', family_id='YYY', person_id='42') with mock_data(content, 'FindIndividus'): resp = app.post_json('/caluire-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "Data conflict" + assert resp.json['err_desc'] == 'Data conflict' assert resp.json['err'] == 'conflict' # existing link but person_id is wrong @@ -429,7 +429,7 @@ def test_link_endpoint_conflict(app, resource, link_params): link.save() with mock_data(content, 'FindIndividus'): resp = app.post_json('/caluire-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "Data conflict" + assert resp.json['err_desc'] == 'Data conflict' assert resp.json['err'] == 'conflict' @@ -496,7 +496,7 @@ def test_link_endpoint_having_homonyme_with_empty_familly(app, resource, link_pa def test_unlink_endpoint_no_result(app, resource): resp = app.post('/caluire-axel/test/unlink?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -515,13 +515,13 @@ def test_family_info_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.caluire_axel.schemas.get_famille_individus') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/caluire-axel/test/family_info?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_family_info_endpoint_no_result(app, resource): resp = app.get('/caluire-axel/test/family_info?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -571,13 +571,13 @@ def test_children_info_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.caluire_axel.schemas.get_famille_individus') as operation: 
operation.side_effect = AxelError('FooBar') resp = app.get('/caluire-axel/test/children_info?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_children_info_endpoint_no_result(app, resource): resp = app.get('/caluire-axel/test/children_info?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -609,13 +609,13 @@ def test_child_info_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.caluire_axel.schemas.get_famille_individus') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/caluire-axel/test/child_info?NameID=yyy&idpersonne=50632') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_child_info_endpoint_no_result(app, resource): resp = app.get('/caluire-axel/test/child_info?NameID=yyy&idpersonne=50632') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -624,7 +624,7 @@ def test_child_info_endpoint_no_result(app, resource): content = xml.read() with mock_data(content, 'GetFamilleIndividus'): resp = app.get('/caluire-axel/test/child_info?NameID=yyy&idpersonne=zzz') - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -668,7 +668,7 @@ def test_school_list_endpoint_axel_error(app, resource): resp = app.get( '/caluire-axel/test/school_list?num=42&street=street=rue%20Pasteur&zipcode=69300&city=Caluire%20et%20Cuire&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -696,7 +696,7 @@ def test_child_schooling_info_endpoint_axel_error(app, resource): resp = app.get( '/caluire-axel/test/child_schooling_info?NameID=yyy&idpersonne=50632&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -708,7 +708,7 @@ def test_child_schooling_info_endpoint_axel_error(app, resource): resp = app.get( '/caluire-axel/test/child_schooling_info?NameID=yyy&idpersonne=50632&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -719,7 +719,7 @@ def test_child_schooling_info_endpoint_bad_date_format(app, resource, value): '/caluire-axel/test/child_schooling_info?NameID=yyy&idpersonne=50632&schooling_date=%s' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -727,7 +727,7 @@ def test_child_schooling_info_endpoint_no_result(app, resource): resp = app.get( '/caluire-axel/test/child_schooling_info?NameID=yyy&idpersonne=50632&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert 
resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -738,7 +738,7 @@ def test_child_schooling_info_endpoint_no_result(app, resource): resp = app.get( '/caluire-axel/test/child_schooling_info?NameID=yyy&idpersonne=zzz&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -766,7 +766,7 @@ def test_child_activities_info_endpoint_axel_error(app, resource): resp = app.get( '/caluire-axel/test/child_activities_info?NameID=yyy&idpersonne=50632&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -778,7 +778,7 @@ def test_child_activities_info_endpoint_axel_error(app, resource): resp = app.get( '/caluire-axel/test/child_activities_info?NameID=yyy&idpersonne=50632&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -789,7 +789,7 @@ def test_child_activities_info_endpoint_bad_date_format(app, resource, value): '/caluire-axel/test/child_activities_info?NameID=yyy&idpersonne=50632&schooling_date=%s' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -797,7 +797,7 @@ def test_child_activities_info_endpoint_no_result(app, resource): resp = app.get( '/caluire-axel/test/child_activities_info?NameID=yyy&idpersonne=50632&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -808,7 +808,7 @@ def test_child_activities_info_endpoint_no_result(app, resource): resp = app.get( '/caluire-axel/test/child_activities_info?NameID=yyy&idpersonne=zzz&schooling_date=2021-05-10' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -846,7 +846,7 @@ def test_register_activity_endpoint_axel_error(app, resource, register_activity_ resp = app.post_json( '/caluire-axel/test/register_activity?NameID=yyy', params=register_activity_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -858,14 +858,14 @@ def test_register_activity_endpoint_axel_error(app, resource, register_activity_ resp = app.post_json( '/caluire-axel/test/register_activity?NameID=yyy', params=register_activity_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_register_activity_endpoint_no_result(app, resource, register_activity_params): register_activity_params['child_id'] = 'zzz' resp = app.post_json('/caluire-axel/test/register_activity?NameID=yyy', params=register_activity_params) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not 
found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -876,7 +876,7 @@ def test_register_activity_endpoint_no_result(app, resource, register_activity_p resp = app.post_json( '/caluire-axel/test/register_activity?NameID=yyy', params=register_activity_params ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -891,7 +891,7 @@ def test_register_activity_endpoint_date_error(app, resource, register_activity_ resp = app.post_json( '/caluire-axel/test/register_activity?NameID=yyy', params=register_activity_params, status=400 ) - assert resp.json['err_desc'] == "start_date should be before end_date" + assert resp.json['err_desc'] == 'start_date should be before end_date' assert resp.json['err'] == 'bad-request' register_activity_params['registration_end_date'] = '2022-09-01' @@ -899,7 +899,7 @@ def test_register_activity_endpoint_date_error(app, resource, register_activity_ '/caluire-axel/test/register_activity?NameID=yyy', params=register_activity_params, status=400 ) assert ( - resp.json['err_desc'] == "start_date and end_date are in different reference year (2021 != 2022)" + resp.json['err_desc'] == 'start_date and end_date are in different reference year (2021 != 2022)' ) assert resp.json['err'] == 'bad-request' @@ -953,7 +953,7 @@ def test_register_activity_endpoint_wrong_code(app, resource, family_data, regis assert resp.json['err'] == 0 assert resp.json['created'] is False else: - assert resp.json['err_desc'] == "Wrong register-activity status" + assert resp.json['err_desc'] == 'Wrong register-activity status' assert resp.json['err'] == 'register-activity-code-error-%s' % code @@ -964,7 +964,7 @@ def test_get_agenda_endpoint_axel_error(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=50632&activity_id=FOOBAR&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -976,7 +976,7 @@ def test_get_agenda_endpoint_axel_error(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=50632&activity_id=FOOBAR&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/activities_info.xml') @@ -992,7 +992,7 @@ def test_get_agenda_endpoint_axel_error(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=50632&activity_id=ELEM&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -1004,14 +1004,14 @@ def test_get_agenda_endpoint_bad_date_format(app, resource, value): % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' resp = app.get( '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=50632&activity_id=FOOBAR&start_date=2020-09-01&end_date=%s' % value, status=400, ) - assert 
resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -1019,7 +1019,7 @@ def test_get_agenda_endpoint_no_result(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=50632&activity_id=FOOBAR&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -1030,7 +1030,7 @@ def test_get_agenda_endpoint_no_result(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=zzz&activity_id=FOOBAR&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/activities_info.xml') @@ -1044,7 +1044,7 @@ def test_get_agenda_endpoint_no_result(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=50632&activity_id=FOOBAR&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Activity not found" + assert resp.json['err_desc'] == 'Activity not found' assert resp.json['err'] == 'not-found' @@ -1058,7 +1058,7 @@ def test_get_agenda_endpoint_date_error(app, resource): '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=50632&activity_id=FOOBAR&start_date=2021-05-31&end_date=2021-05-30', status=400, ) - assert resp.json['err_desc'] == "start_date should be before end_date" + assert resp.json['err_desc'] == 'start_date should be before end_date' assert resp.json['err'] == 'bad-request' resp = app.get( @@ -1066,7 +1066,7 @@ def test_get_agenda_endpoint_date_error(app, resource): status=400, ) assert ( - resp.json['err_desc'] == "start_date and end_date are in different reference year (2021 != 2022)" + resp.json['err_desc'] == 'start_date and end_date are in different reference year (2021 != 2022)' ) assert resp.json['err'] == 'bad-request' @@ -1474,7 +1474,7 @@ def test_get_agenda_endpoint_wrong_code(app, resource, family_data, activities, resp = app.get( '/caluire-axel/test/get_agenda?NameID=yyy&idpersonne=50632&activity_id=ELEM&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Wrong agenda status" + assert resp.json['err_desc'] == 'Wrong agenda status' assert resp.json['err'] == 'agenda-code-error-%s' % code @@ -1485,7 +1485,7 @@ def test_get_agenda_periscolaire_endpoint_axel_error(app, resource, family_data) resp = app.get( '/caluire-axel/test/get_agenda_periscolaire?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -1497,7 +1497,7 @@ def test_get_agenda_periscolaire_endpoint_axel_error(app, resource, family_data) resp = app.get( '/caluire-axel/test/get_agenda_periscolaire?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 
'data/caluire_axel/activities_info.xml') @@ -1513,7 +1513,7 @@ def test_get_agenda_periscolaire_endpoint_axel_error(app, resource, family_data) resp = app.get( '/caluire-axel/test/get_agenda_periscolaire?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -1525,14 +1525,14 @@ def test_get_agenda_periscolaire_endpoint_bad_date_format(app, resource, value): % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' resp = app.get( '/caluire-axel/test/get_agenda_periscolaire?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=%s' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -1540,7 +1540,7 @@ def test_get_agenda_periscolaire_endpoint_no_result(app, resource): resp = app.get( '/caluire-axel/test/get_agenda_periscolaire?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -1551,7 +1551,7 @@ def test_get_agenda_periscolaire_endpoint_no_result(app, resource): resp = app.get( '/caluire-axel/test/get_agenda_periscolaire?NameID=yyy&idpersonne=zzz&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -1565,7 +1565,7 @@ def test_get_agenda_periscolaire_endpoint_date_error(app, resource): '/caluire-axel/test/get_agenda_periscolaire?NameID=yyy&idpersonne=50632&start_date=2021-05-31&end_date=2021-05-30', status=400, ) - assert resp.json['err_desc'] == "start_date should be before end_date" + assert resp.json['err_desc'] == 'start_date should be before end_date' assert resp.json['err'] == 'bad-request' resp = app.get( @@ -1573,7 +1573,7 @@ def test_get_agenda_periscolaire_endpoint_date_error(app, resource): status=400, ) assert ( - resp.json['err_desc'] == "start_date and end_date are in different reference year (2021 != 2022)" + resp.json['err_desc'] == 'start_date and end_date are in different reference year (2021 != 2022)' ) assert resp.json['err'] == 'bad-request' @@ -1696,7 +1696,7 @@ def test_get_agenda_periscolaire_endpoint_wrong_code(app, resource, family_data, resp = app.get( '/caluire-axel/test/get_agenda_periscolaire?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Wrong agenda status" + assert resp.json['err_desc'] == 'Wrong agenda status' assert resp.json['err'] == 'agenda-code-error-%s' % code @@ -1707,7 +1707,7 @@ def test_get_agenda_full_endpoint_axel_error(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda_full?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -1719,7 +1719,7 @@ 
def test_get_agenda_full_endpoint_axel_error(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda_full?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/activities_info.xml') @@ -1735,7 +1735,7 @@ def test_get_agenda_full_endpoint_axel_error(app, resource, family_data): resp = app.get( '/caluire-axel/test/get_agenda_full?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -1747,14 +1747,14 @@ def test_get_agenda_full_endpoint_bad_date_format(app, resource, value): % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' resp = app.get( '/caluire-axel/test/get_agenda_full?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=%s' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -1762,7 +1762,7 @@ def test_get_agenda_full_endpoint_no_result(app, resource): resp = app.get( '/caluire-axel/test/get_agenda_full?NameID=yyy&idpersonne=50632&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -1773,7 +1773,7 @@ def test_get_agenda_full_endpoint_no_result(app, resource): resp = app.get( '/caluire-axel/test/get_agenda_full?NameID=yyy&idpersonne=zzz&start_date=2020-09-01&end_date=2021-08-31' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -1787,7 +1787,7 @@ def test_get_agenda_full_endpoint_date_error(app, resource): '/caluire-axel/test/get_agenda_full?NameID=yyy&idpersonne=50632&start_date=2021-05-31&end_date=2021-05-30', status=400, ) - assert resp.json['err_desc'] == "start_date should be before end_date" + assert resp.json['err_desc'] == 'start_date should be before end_date' assert resp.json['err'] == 'bad-request' resp = app.get( @@ -1795,7 +1795,7 @@ def test_get_agenda_full_endpoint_date_error(app, resource): status=400, ) assert ( - resp.json['err_desc'] == "start_date and end_date are in different reference year (2021 != 2022)" + resp.json['err_desc'] == 'start_date and end_date are in different reference year (2021 != 2022)' ) assert resp.json['err'] == 'bad-request' @@ -1918,7 +1918,7 @@ def test_set_agenda_endpoint_axel_error(app, resource, family_data, activities, with mock.patch('passerelle.contrib.caluire_axel.schemas.get_famille_individus') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -1928,7 
+1928,7 @@ def test_set_agenda_endpoint_axel_error(app, resource, family_data, activities, with mock.patch('passerelle.contrib.caluire_axel.schemas.get_list_activites') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/activities_info.xml') @@ -1942,7 +1942,7 @@ def test_set_agenda_endpoint_axel_error(app, resource, family_data, activities, with mock.patch('passerelle.contrib.caluire_axel.schemas.get_agenda') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' content = ''' @@ -1969,14 +1969,14 @@ def test_set_agenda_endpoint_axel_error(app, resource, family_data, activities, with mock.patch('passerelle.contrib.caluire_axel.schemas.set_agenda') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_set_agenda_endpoint_no_result(app, resource, booking_params): booking_params['child_id'] = 'zzz' resp = app.post_json('/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -1985,7 +1985,7 @@ def test_set_agenda_endpoint_no_result(app, resource, booking_params): content = xml.read() with mock_data(content, 'GetFamilleIndividus'): resp = app.post_json('/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -1998,13 +1998,13 @@ def test_set_agenda_endpoint_date_error(app, resource, booking_params): booking_params['start_date'] = '2021-09-01' booking_params['end_date'] = '2021-08-31' resp = app.post_json('/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params, status=400) - assert resp.json['err_desc'] == "start_date should be before end_date" + assert resp.json['err_desc'] == 'start_date should be before end_date' assert resp.json['err'] == 'bad-request' booking_params['end_date'] = '2022-09-01' resp = app.post_json('/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params, status=400) assert ( - resp.json['err_desc'] == "start_date and end_date are in different reference year (2021 != 2022)" + resp.json['err_desc'] == 'start_date and end_date are in different reference year (2021 != 2022)' ) assert resp.json['err'] == 'bad-request' @@ -2225,7 +2225,7 @@ def test_set_agenda_endpoint_multi_matin(app, resource, family_data, booking_par params=booking_params, status=400, ) - assert resp.json['err_desc'] == "not possible to book %s the same day" % ' and '.join( + assert resp.json['err_desc'] == 'not possible to book %s the same day' % ' and '.join( booked ) assert resp.json['err'] == 'bad-request' @@ -2291,7 
+2291,7 @@ def test_set_agenda_endpoint_multi_matin_no_changes(app, resource, family_data, params=booking_params, status=400, ) - assert resp.json['err_desc'] == "not possible to book %s the same day" % ' and '.join( + assert resp.json['err_desc'] == 'not possible to book %s the same day' % ' and '.join( booking_params['booking_list'] ) assert resp.json['err'] == 'bad-request' @@ -2380,7 +2380,7 @@ def test_set_agenda_endpoint_multi_soir(app, resource, family_data, booking_para params=booking_params, status=400, ) - assert resp.json['err_desc'] == "not possible to book %s the same day" % ' and '.join( + assert resp.json['err_desc'] == 'not possible to book %s the same day' % ' and '.join( booked ) assert resp.json['err'] == 'bad-request' @@ -2456,7 +2456,7 @@ def test_set_agenda_endpoint_multi_soir_no_changes(app, resource, family_data, b params=booking_params, status=400, ) - assert resp.json['err_desc'] == "not possible to book %s the same day" % ' and '.join( + assert resp.json['err_desc'] == 'not possible to book %s the same day' % ' and '.join( booking_params['booking_list'] ) assert resp.json['err'] == 'bad-request' @@ -2687,7 +2687,7 @@ def test_set_agenda_endpoint_wrong_code(app, resource, family_data, activities, '/caluire-axel/test/set_agenda?NameID=yyy', params=booking_params, ) - assert resp.json['err_desc'] == "Wrong agenda status" + assert resp.json['err_desc'] == 'Wrong agenda status' assert resp.json['err'] == 'agenda-code-error-%s' % code @@ -2700,7 +2700,7 @@ def test_set_agenda_apply_changes_endpoint_axel_error(app, resource, family_data with mock.patch('passerelle.contrib.caluire_axel.schemas.get_famille_individus') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/caluire-axel/test/set_agenda_apply_changes?NameID=yyy', params=changes_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -2712,7 +2712,7 @@ def test_set_agenda_apply_changes_endpoint_axel_error(app, resource, family_data resp = app.post_json( '/caluire-axel/test/set_agenda_apply_changes?NameID=yyy', params=changes_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/activities_info.xml') @@ -2728,7 +2728,7 @@ def test_set_agenda_apply_changes_endpoint_axel_error(app, resource, family_data resp = app.post_json( '/caluire-axel/test/set_agenda_apply_changes?NameID=yyy', params=changes_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' activities = [ @@ -2765,14 +2765,14 @@ def test_set_agenda_apply_changes_endpoint_axel_error(app, resource, family_data resp = app.post_json( '/caluire-axel/test/set_agenda_apply_changes?NameID=yyy', params=changes_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_set_agenda_apply_changes_endpoint_no_result(app, resource, changes_params): changes_params['child_id'] = 'zzz' resp = app.post_json('/caluire-axel/test/set_agenda_apply_changes?NameID=yyy', params=changes_params) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' 
assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -2781,7 +2781,7 @@ def test_set_agenda_apply_changes_endpoint_no_result(app, resource, changes_para content = xml.read() with mock_data(content, 'GetFamilleIndividus'): resp = app.post_json('/caluire-axel/test/set_agenda_apply_changes?NameID=yyy', params=changes_params) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -2796,7 +2796,7 @@ def test_set_agenda_apply_changes_endpoint_date_error(app, resource, changes_par resp = app.post_json( '/caluire-axel/test/set_agenda_apply_changes?NameID=yyy', params=changes_params, status=400 ) - assert resp.json['err_desc'] == "start_date should be before end_date" + assert resp.json['err_desc'] == 'start_date should be before end_date' assert resp.json['err'] == 'bad-request' changes_params['end_date'] = '2022-09-01' @@ -2804,7 +2804,7 @@ def test_set_agenda_apply_changes_endpoint_date_error(app, resource, changes_par '/caluire-axel/test/set_agenda_apply_changes?NameID=yyy', params=changes_params, status=400 ) assert ( - resp.json['err_desc'] == "start_date and end_date are in different reference year (2021 != 2022)" + resp.json['err_desc'] == 'start_date and end_date are in different reference year (2021 != 2022)' ) assert resp.json['err'] == 'bad-request' @@ -3035,7 +3035,7 @@ def test_set_agenda_apply_changes_endpoint_multi_too_many_cantine_activities( ) assert ( resp.json['err_desc'] - == "more than one activity cantine found for this child (ECOLELEM, ECOLELEM2)" + == 'more than one activity cantine found for this child (ECOLELEM, ECOLELEM2)' ) assert resp.json['err'] == 'bad-request' @@ -3185,7 +3185,7 @@ def test_set_agenda_apply_changes_endpoint_multi_soir(app, resource, family_data ) assert ( resp.json['err_desc'] - == "not possible to book 50632:ETUDES:2020-09-07 and 50632:GARDERIES:2020-09-07 the same day" + == 'not possible to book 50632:ETUDES:2020-09-07 and 50632:GARDERIES:2020-09-07 the same day' ) assert resp.json['err'] == 'bad-request' @@ -3238,7 +3238,7 @@ def test_set_activity_agenda_typical_week_endpoint_axel_error( resp = app.post_json( '/caluire-axel/test/set_activity_agenda_typical_week?NameID=yyy', params=week_booking_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/family_info.xml') @@ -3250,7 +3250,7 @@ def test_set_activity_agenda_typical_week_endpoint_axel_error( resp = app.post_json( '/caluire-axel/test/set_activity_agenda_typical_week?NameID=yyy', params=week_booking_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/activities_info.xml') @@ -3267,7 +3267,7 @@ def test_set_activity_agenda_typical_week_endpoint_axel_error( '/caluire-axel/test/set_activity_agenda_typical_week?NameID=yyy', params=week_booking_params, ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' content = ''' @@ -3297,7 +3297,7 @@ def test_set_activity_agenda_typical_week_endpoint_axel_error( '/caluire-axel/test/set_activity_agenda_typical_week?NameID=yyy', params=week_booking_params, ) - assert 
resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -3306,7 +3306,7 @@ def test_set_activity_agenda_typical_week_endpoint_no_result(app, resource, week resp = app.post_json( '/caluire-axel/test/set_activity_agenda_typical_week?NameID=yyy', params=week_booking_params ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -3317,7 +3317,7 @@ def test_set_activity_agenda_typical_week_endpoint_no_result(app, resource, week resp = app.post_json( '/caluire-axel/test/set_activity_agenda_typical_week?NameID=yyy', params=week_booking_params ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -3334,7 +3334,7 @@ def test_set_activity_agenda_typical_week_endpoint_date_error(app, resource, wee params=week_booking_params, status=400, ) - assert resp.json['err_desc'] == "start_date should be before end_date" + assert resp.json['err_desc'] == 'start_date should be before end_date' assert resp.json['err'] == 'bad-request' week_booking_params['end_date'] = '2022-09-01' @@ -3344,7 +3344,7 @@ def test_set_activity_agenda_typical_week_endpoint_date_error(app, resource, wee status=400, ) assert ( - resp.json['err_desc'] == "start_date and end_date are in different reference year (2021 != 2022)" + resp.json['err_desc'] == 'start_date and end_date are in different reference year (2021 != 2022)' ) assert resp.json['err'] == 'bad-request' @@ -3512,7 +3512,7 @@ def test_set_activity_agenda_typical_week_endpoint_extrascolaire_and_classe_deco params=week_booking_params, status=400, ) - assert resp.json['err_desc'] == "Not available for this activity" + assert resp.json['err_desc'] == 'Not available for this activity' assert resp.json['err'] == 'bad-request' @@ -3617,7 +3617,7 @@ def test_set_activity_agenda_typical_week_endpoint_wrong_code( '/caluire-axel/test/set_activity_agenda_typical_week?NameID=yyy', params=week_booking_params, ) - assert resp.json['err_desc'] == "Wrong agenda status" + assert resp.json['err_desc'] == 'Wrong agenda status' assert resp.json['err'] == 'agenda-code-error-%s' % code @@ -3626,7 +3626,7 @@ def test_invoices_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.caluire_axel.schemas.get_factures_a_payer') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/caluire-axel/test/regie/MAREGIE/invoices?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' content = ''' @@ -3636,13 +3636,13 @@ def test_invoices_endpoint_axel_error(app, resource): ''' with mock_data(content, 'GetFacturesaPayer'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoices?NameID=yyy') - assert resp.json['err_desc'] == "Wrong get-invoices status" + assert resp.json['err_desc'] == 'Wrong get-invoices status' assert resp.json['err'] == 'get-invoices-code-error--3' def test_invoices_endpoint_no_result(app, resource): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoices?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -3733,7 +3733,7 @@ def test_invoices_history_endpoint_axel_error(app, resource): with 
mock.patch('passerelle.contrib.caluire_axel.schemas.get_list_factures') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/caluire-axel/test/regie/MAREGIE/invoices/history?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' content = ''' @@ -3743,7 +3743,7 @@ def test_invoices_history_endpoint_axel_error(app, resource): ''' with mock_data(content, 'GetListFactures'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoices/history?NameID=yyy') - assert resp.json['err_desc'] == "Wrong get-historical-invoices status" + assert resp.json['err_desc'] == 'Wrong get-historical-invoices status' assert resp.json['err'] == 'get-historical-invoices-code-error--3' @@ -3754,13 +3754,13 @@ def test_invoices_history_endpoint_bad_request(app, resource): '/caluire-axel/test/regie/MAREGIE/invoices/history?NameID=yyy&nb_mounts_limit=not_a_number', status=400, ) - assert resp.json['err_desc'] == "nb_mounts_limit must be an integer" + assert resp.json['err_desc'] == 'nb_mounts_limit must be an integer' assert resp.json['err'] == 'bad-request' def test_invoices_history_endpoint_no_result(app, resource): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoices/history?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -3848,7 +3848,7 @@ def test_invoice_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.caluire_axel.schemas.get_factures_a_payer') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-42?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -3859,7 +3859,7 @@ def test_invoice_endpoint_bad_request(app, resource): '/caluire-axel/test/regie/MAREGIE/invoice/historical-XXX-42?NameID=yyy&nb_mounts_limit=not_a_number', status=400, ) - assert resp.json['err_desc'] == "nb_mounts_limit must be an integer" + assert resp.json['err_desc'] == 'nb_mounts_limit must be an integer' assert resp.json['err'] == 'bad-request' content = ''' @@ -3869,7 +3869,7 @@ def test_invoice_endpoint_bad_request(app, resource): ''' with mock_data(content, 'GetFacturesaPayer'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-42?NameID=yyy') - assert resp.json['err_desc'] == "Wrong get-invoices status" + assert resp.json['err_desc'] == 'Wrong get-invoices status' assert resp.json['err'] == 'get-invoices-code-error--3' content = ''' @@ -3879,7 +3879,7 @@ def test_invoice_endpoint_bad_request(app, resource): ''' with mock_data(content, 'GetListFactures'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/historical-XXX-42?NameID=yyy') - assert resp.json['err_desc'] == "Wrong get-historical-invoices status" + assert resp.json['err_desc'] == 'Wrong get-historical-invoices status' assert resp.json['err'] == 'get-historical-invoices-code-error--3' @@ -3899,12 +3899,12 @@ def test_invoice_endpoint_no_result(app, resource): ) with mock_data(content, 'GetFacturesaPayer'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-35?NameID=yyy') - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_data(content, 'GetFacturesaPayer'): resp = 
app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-44?NameID=yyy') - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' content = ( @@ -3917,12 +3917,12 @@ def test_invoice_endpoint_no_result(app, resource): ) with mock_data(content, 'GetListFactures'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/historical-XXX-35?NameID=yyy') - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_data(content, 'GetListFactures'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/historical-XXX-44?NameID=yyy') - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' @@ -4087,7 +4087,7 @@ def test_invoice_pdf_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.caluire_axel.schemas.get_factures_a_payer') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-42/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/invoices.xml') @@ -4104,7 +4104,7 @@ def test_invoice_pdf_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.caluire_axel.schemas.get_pdf_facture') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-42/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -4115,14 +4115,14 @@ def test_invoice_pdf_endpoint_bad_request(app, resource): '/caluire-axel/test/regie/MAREGIE/invoice/historical-XXX-42/pdf?NameID=yyy&nb_mounts_limit=not_a_number', status=404, ) - assert resp.json['err_desc'] == "nb_mounts_limit must be an integer" + assert resp.json['err_desc'] == 'nb_mounts_limit must be an integer' assert resp.json['err'] == 'bad-request' @freezegun.freeze_time('2019-12-13') def test_invoice_pdf_endpoint_no_result(app, resource): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-42/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -4138,17 +4138,17 @@ def test_invoice_pdf_endpoint_no_result(app, resource): ) with mock_data(content, 'GetFacturesaPayer'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-35/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_data(content, 'GetFacturesaPayer'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-44/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_data(content, 'GetFacturesaPayer'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-43/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "PDF not available" + assert resp.json['err_desc'] == 'PDF not 
available' assert resp.json['err'] == 'not-available' pdf_content = ''' @@ -4161,7 +4161,7 @@ def test_invoice_pdf_endpoint_no_result(app, resource): invoice.return_value = {'has_pdf': True, 'display_id': '42'} with mock_data(pdf_content, 'GetPdfFacture'): resp = app.get('/caluire-axel/test/regie/MAREGIE/invoice/XXX-42/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "PDF error" + assert resp.json['err_desc'] == 'PDF error' assert resp.json['err'] == 'error' @@ -4195,7 +4195,7 @@ def test_pay_invoice_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.caluire_axel.schemas.get_factures_a_payer') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/caluire-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/caluire_axel/invoices.xml') @@ -4214,7 +4214,7 @@ def test_pay_invoice_endpoint_axel_error(app, resource): resp = app.post_json( '/caluire-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' for key in ('xml_request', 'xml_response', 'regie_id', 'family_id', 'invoice', 'post_data', 'kwargs'): assert key in resp.json['data'].keys() @@ -4232,7 +4232,7 @@ def test_pay_invoice_endpoint_axel_error(app, resource): ) client.return_value.service.setData.return_value = XML_RESPONSE_TEMPLATE % ('SetPaiement', content2) resp = app.post_json('/caluire-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload) - assert resp.json['err_desc'] == "Wrong pay-invoice status" + assert resp.json['err_desc'] == 'Wrong pay-invoice status' assert resp.json['err'] == 'pay-invoice-code-error--3' @@ -4261,12 +4261,12 @@ def test_pay_invoice_endpoint_no_result(app, resource): ) with mock_data(content, 'GetFacturesaPayer'): resp = app.post_json('/caluire-axel/test/regie/MAREGIE/invoice/XXX-35/pay?NameID=yyy', params=payload) - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_data(content, 'GetFacturesaPayer'): resp = app.post_json('/caluire-axel/test/regie/MAREGIE/invoice/XXX-44/pay?NameID=yyy', params=payload) - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' @@ -4376,7 +4376,7 @@ def test_upload_attachments_endpoint_axel_error(app, resource, family_data, uplo resp = app.post_json( '/caluire-axel/test/upload_attachments?NameID=yyy', params=upload_attachments_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' with mock.patch( @@ -4388,7 +4388,7 @@ def test_upload_attachments_endpoint_axel_error(app, resource, family_data, uplo resp = app.post_json( '/caluire-axel/test/upload_attachments?NameID=yyy', params=upload_attachments_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -4407,7 +4407,7 @@ def test_upload_attachments_endpoint_bad_date_format(app, resource, upload_attac def test_upload_attachments_endpoint_no_result(app, resource, family_data, 
upload_attachments_params): resp = app.post_json('/caluire-axel/test/upload_attachments?NameID=yyy', params=upload_attachments_params) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', family_id='XXX', person_id='42') @@ -4420,7 +4420,7 @@ def test_upload_attachments_endpoint_no_result(app, resource, family_data, uploa '/caluire-axel/test/upload_attachments?NameID=yyy', params=upload_attachments_params, ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -4471,5 +4471,5 @@ def test_upload_attachments_wrong_code(app, resource, family_data, upload_attach params=upload_attachments_params, ) - assert resp.json['err_desc'] == "Wrong upload-attachments status" + assert resp.json['err_desc'] == 'Wrong upload-attachments status' assert resp.json['err'] == 'upload-attachments-code-error-%s' % code diff --git a/tests/test_cartads_cs.py b/tests/test_cartads_cs.py index fb3e0c2c..9616f7ae 100644 --- a/tests/test_cartads_cs.py +++ b/tests/test_cartads_cs.py @@ -131,10 +131,10 @@ class FakeService: ('DateDemande', datetime.datetime(2019, 4, 15, 0, 0)), ('DatePresentation', None), ('DateReception', None), - ('Descriptif', "Un document graphique..."), + ('Descriptif', 'Un document graphique...'), ('IdDosPiece', 133837), ('IdPiece', 58), - ('LibellePiece', "Document graphique permettant..."), + ('LibellePiece', 'Document graphique permettant...'), ('NbDocuments', 0), ] ), @@ -174,7 +174,7 @@ class FakeService: ('DateDemande', None), ('DatePresentation', None), ('DateReception', None), - ('Descriptif', "Dans les cas..."), + ('Descriptif', 'Dans les cas...'), ('IdDosPiece', 0), ('IdPiece', 192), ('LibellePiece', 'Document du...'), @@ -494,7 +494,7 @@ def test_status_zip_not_considered_error(connector, app, cached_data): dossier.zip_ack_response = 'False' dossier.save() resp = app.get('/cartads-cs/test/status?dossier_id=%s' % dossier.id) - assert resp.json['status_label'] == "File not considered" + assert resp.json['status_label'] == 'File not considered' def test_additional_pieces_management(connector, app, cached_data): diff --git a/tests/test_cityweb.py b/tests/test_cityweb.py index de4d1d99..5a485cc0 100644 --- a/tests/test_cityweb.py +++ b/tests/test_cityweb.py @@ -67,7 +67,7 @@ def assert_xml_doc(filename, assertions): content = fd.read() xml_content = etree.fromstring(content) assert len(xml_content.nsmap) == 1 - assert xml_content.nsmap['xs'] == "http://tempuri.org/XMLSchema.xsd" + assert xml_content.nsmap['xs'] == 'http://tempuri.org/XMLSchema.xsd' schema.assertValid(xml_content) root = xobject.fromstring(content) for epath, value in assertions.items(): diff --git a/tests/test_clicrdv.py b/tests/test_clicrdv.py index 69c52816..fcd7d3d1 100644 --- a/tests/test_clicrdv.py +++ b/tests/test_clicrdv.py @@ -31,7 +31,7 @@ def test_connector_is_legacy(connector, app, admin_user): @mock.patch('passerelle.utils.Request.request') def test_request_call(mocked_request, app, connector): - mocked_request.json.return_value = "foo" + mocked_request.json.return_value = 'foo' connector.request('bar') assert mocked_request.call_count == 1 req = mocked_request.call_args[0][1] @@ -42,21 +42,21 @@ def test_request_call(mocked_request, app, connector): def test_interventionsets(mocked_request, app, connector): response = mock.Mock() response.json.return_value = { - "totalRecords": 2, 
- "records": [ + 'totalRecords': 2, + 'records': [ { - "sort": 1, - "publicname": "Une Demande de Passeport", - "name": "Demande", - "id": 7032, - "group_id": 5242, + 'sort': 1, + 'publicname': 'Une Demande de Passeport', + 'name': 'Demande', + 'id': 7032, + 'group_id': 5242, }, { - "sort": 2, - "publicname": "Un Retrait de Passeport", - "name": "Retrait", - "id": 7033, - "group_id": 5242, + 'sort': 2, + 'publicname': 'Un Retrait de Passeport', + 'name': 'Retrait', + 'id': 7033, + 'group_id': 5242, }, ], } @@ -70,27 +70,27 @@ def test_interventionsets(mocked_request, app, connector): def test_interventionsets_details(mocked_request, app, connector): response = mock.Mock() response.json.return_value = { - "totalRecords": 2, - "records": [ + 'totalRecords': 2, + 'records': [ { - "sort": 1, - "publicname": "pour une personne", - "description": None, - "name": "1 personne", - "interventionset_id": 7032, - "group_id": 5242, - "id": 63258, - "abbr": "1 demande", + 'sort': 1, + 'publicname': 'pour une personne', + 'description': None, + 'name': '1 personne', + 'interventionset_id': 7032, + 'group_id': 5242, + 'id': 63258, + 'abbr': '1 demande', }, { - "sort": 2, - "publicname": "pour deuxs personnes", - "description": None, - "name": "2 personnes", - "interventionset_id": 7032, - "group_id": 5242, - "id": 63259, - "abbr": "2 demandes", + 'sort': 2, + 'publicname': 'pour deuxs personnes', + 'description': None, + 'name': '2 personnes', + 'interventionset_id': 7032, + 'group_id': 5242, + 'id': 63259, + 'abbr': '2 demandes', }, ], } @@ -103,7 +103,7 @@ def test_interventionsets_details(mocked_request, app, connector): @mock.patch('passerelle.utils.Request.request') def test_interventions_get_datetimes(mocked_request, app, connector): response = mock.Mock() - response.json.return_value = {"availabletimeslots": []} + response.json.return_value = {'availabletimeslots': []} mocked_request.return_value = response resp = app.get('/clicrdv/test/interventions/63258/dates/') assert resp.json.get('data') == [] @@ -126,7 +126,7 @@ def test_interventions_get_datetimes(mocked_request, app, connector): assert query['format'] == ['json'] response.json.return_value = { - "availabletimeslots": [{"start": "2016-09-21 12:34:56"}, {"start": "2016-09-22 11:22:33"}] + 'availabletimeslots': [{'start': '2016-09-21 12:34:56'}, {'start': '2016-09-22 11:22:33'}] } mocked_request.return_value = response resp = app.get('/clicrdv/test/interventions/63258/dates/').json @@ -137,7 +137,7 @@ def test_interventions_get_datetimes(mocked_request, app, connector): assert resp['data'][1] == {'id': '2016-09-22', 'text': '22 September 2016'} response.json.return_value = { - "availabletimeslots": [{"start": "2016-09-22 11:22:33"}, {"start": "2016-09-21 12:34:56"}] + 'availabletimeslots': [{'start': '2016-09-22 11:22:33'}, {'start': '2016-09-21 12:34:56'}] } # will be sorted mocked_request.return_value = response resp = app.get('/clicrdv/test/interventions/63258/datetimes/').json @@ -148,7 +148,7 @@ def test_interventions_get_datetimes(mocked_request, app, connector): assert resp['data'][1] == {'id': '2016-09-22-11:22:33', 'text': '22 September 2016 11:22'} response.json.return_value = { - "availabletimeslots": [{"start": "2016-09-21 12:34:56"}, {"start": "2016-09-21 11:22:33"}] + 'availabletimeslots': [{'start': '2016-09-21 12:34:56'}, {'start': '2016-09-21 11:22:33'}] } # will be sorted mocked_request.return_value = response resp = app.get('/clicrdv/test/interventions/63258/2016-09-21/times').json @@ -167,11 +167,11 @@ def 
test_interventions_get_datetimes(mocked_request, app, connector): @mock.patch('passerelle.utils.Request.request') def test_interventions_get_datetimes_error(mocked_request, app, connector): def raise_for_status(): - raise HTTPError("400 Client Error: Bad Request for url: xxx") + raise HTTPError('400 Client Error: Bad Request for url: xxx') response = mock.Mock() response.json.return_value = [ - {"error": "The intervention_ids parameter contains at least one invalid id"} + {'error': 'The intervention_ids parameter contains at least one invalid id'} ] response.raise_for_status = raise_for_status mocked_request.return_value = response @@ -196,10 +196,10 @@ def test_cancel_appointment(mocked_request, app, connector): @mock.patch('passerelle.utils.Request.request') def test_failed_cancel_appointment(mocked_request, app, connector): def raise_for_status(): - raise HTTPError("400 Client Error: Bad Request for url: xxx") + raise HTTPError('400 Client Error: Bad Request for url: xxx') response = mock.Mock() - response.json.return_value = [{"msg": "cancel failed"}] + response.json.return_value = [{'msg': 'cancel failed'}] response.raise_for_status = raise_for_status mocked_request.return_value = response obj_type = ContentType.objects.get_for_model(ClicRdv) @@ -217,10 +217,10 @@ def test_failed_cancel_appointment(mocked_request, app, connector): @mock.patch('passerelle.utils.Request.request') def test_failed_appointment_creation(mocked_request, app, connector): def raise_for_status(): - raise HTTPError("400 Client Error: Bad Request for url: xxx") + raise HTTPError('400 Client Error: Bad Request for url: xxx') response = mock.Mock() - response.json.return_value = [{"msg": "creation failed"}] + response.json.return_value = [{'msg': 'creation failed'}] response.raise_for_status = raise_for_status mocked_request.return_value = response obj_type = ContentType.objects.get_for_model(ClicRdv) diff --git a/tests/test_cmis.py b/tests/test_cmis.py index d42dc575..26cd48c7 100644 --- a/tests/test_cmis.py +++ b/tests/test_cmis.py @@ -71,12 +71,12 @@ def test_uploadfile(app, setup, tmpdir, monkeypatch): object_type=None, properties=None, ): - assert content_type == "image/jpeg" + assert content_type == 'image/jpeg' with open(file_name, 'wb') as f: f.write(file_byte_content) - return Mock(properties={"toto": "tata"}) + return Mock(properties={'toto': 'tata'}) - file_name = "testfile.whatever" + file_name = 'testfile.whatever' file_content = 'aaaa' monkeypatch.chdir(tmpdir) import passerelle.apps.cmis.models @@ -85,8 +85,8 @@ def test_uploadfile(app, setup, tmpdir, monkeypatch): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": file_name, "content": b64encode(file_content), "content_type": "image/jpeg"}, + 'path': '/some/folder/structure', + 'file': {'filename': file_name, 'content': b64encode(file_content), 'content_type': 'image/jpeg'}, }, ) result_file = py.path.local(file_name) @@ -95,15 +95,15 @@ def test_uploadfile(app, setup, tmpdir, monkeypatch): assert result_file.read() == file_content json_result = response.json assert json_result['err'] == 0 - assert json_result['data']['properties'] == {"toto": "tata"} + assert json_result['data']['properties'] == {'toto': 'tata'} - file_name_overwrite = "testfile.whatever.overwrite" + file_name_overwrite = 'testfile.whatever.overwrite' response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": file_name, "content": 
b64encode(file_content), "content_type": "image/jpeg"}, - "filename": file_name_overwrite, + 'path': '/some/folder/structure', + 'file': {'filename': file_name, 'content': b64encode(file_content), 'content_type': 'image/jpeg'}, + 'filename': file_name_overwrite, }, ) result_file = py.path.local(file_name_overwrite) @@ -112,7 +112,7 @@ def test_uploadfile(app, setup, tmpdir, monkeypatch): assert result_file.read() == file_content json_result = response.json assert json_result['err'] == 0 - assert json_result['data']['properties'] == {"toto": "tata"} + assert json_result['data']['properties'] == {'toto': 'tata'} def test_upload_file_metadata(app, setup, monkeypatch): @@ -126,21 +126,21 @@ def test_upload_file_metadata(app, setup, monkeypatch): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": "bla", "content": b64encode('bla')}, - "object_type": "D:dui:type", - "properties": { - "cmis:description": "Coucou", - "dui:tnumDossier": "42", + 'path': '/some/folder/structure', + 'file': {'filename': 'bla', 'content': b64encode('bla')}, + 'object_type': 'D:dui:type', + 'properties': { + 'cmis:description': 'Coucou', + 'dui:tnumDossier': '42', }, - "properties/dui:ttypeStructure": "Accueil de loisirs", + 'properties/dui:ttypeStructure': 'Accueil de loisirs', }, ) assert response.json['data']['properties'] == { - "cmis:objectTypeId": "D:dui:type", - "cmis:description": "Coucou", - "dui:tnumDossier": "42", - "dui:ttypeStructure": "Accueil de loisirs", + 'cmis:objectTypeId': 'D:dui:type', + 'cmis:description': 'Coucou', + 'dui:tnumDossier': '42', + 'dui:ttypeStructure': 'Accueil de loisirs', } @@ -148,8 +148,8 @@ def test_uploadfile_error_if_no_file_name(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"content": b64encode('aaaa'), "content_type": "image/jpeg"}, + 'path': '/some/folder/structure', + 'file': {'content': b64encode('aaaa'), 'content_type': 'image/jpeg'}, }, expect_errors=True, ) @@ -162,8 +162,8 @@ def test_uploadfile_error_if_non_string_file_name(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": 1, "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + 'path': '/some/folder/structure', + 'file': {'filename': 1, 'content': b64encode('aaaa'), 'content_type': 'image/jpeg'}, }, expect_errors=True, ) @@ -174,9 +174,9 @@ def test_uploadfile_error_if_non_string_file_name(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"content": b64encode('aaaa'), "content_type": "image/jpeg"}, - "filename": 1, + 'path': '/some/folder/structure', + 'file': {'content': b64encode('aaaa'), 'content_type': 'image/jpeg'}, + 'filename': 1, }, expect_errors=True, ) @@ -189,8 +189,8 @@ def test_uploadfile_error_if_non_valid_file_name(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": ",.,", "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + 'path': '/some/folder/structure', + 'file': {'filename': ',.,', 'content': b64encode('aaaa'), 'content_type': 'image/jpeg'}, }, expect_errors=True, ) @@ -201,9 +201,9 @@ def test_uploadfile_error_if_non_valid_file_name(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"content": b64encode('aaaa'), 
"content_type": "image/jpeg"}, - "filename": ",.,", + 'path': '/some/folder/structure', + 'file': {'content': b64encode('aaaa'), 'content_type': 'image/jpeg'}, + 'filename': ',.,', }, expect_errors=True, ) @@ -216,7 +216,7 @@ def test_uploadfile_error_if_no_path(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), "content_type": "image/jpeg"} + 'file': {'filename': 'somefile.txt', 'content': b64encode('aaaa'), 'content_type': 'image/jpeg'} }, expect_errors=True, ) @@ -229,8 +229,8 @@ def test_uploadfile_error_if_non_string_path(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": 1, - "file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + 'path': 1, + 'file': {'filename': 'somefile.txt', 'content': b64encode('aaaa'), 'content_type': 'image/jpeg'}, }, expect_errors=True, ) @@ -243,8 +243,8 @@ def test_uploadfile_error_if_no_regular_path(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "no/leading/slash", - "file": {"filename": 'somefile.txt', "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + 'path': 'no/leading/slash', + 'file': {'filename': 'somefile.txt', 'content': b64encode('aaaa'), 'content_type': 'image/jpeg'}, }, expect_errors=True, ) @@ -257,8 +257,8 @@ def test_uploadfile_error_if_no_file_content(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": 'somefile.txt', "content_type": "image/jpeg"}, + 'path': '/some/folder/structure', + 'file': {'filename': 'somefile.txt', 'content_type': 'image/jpeg'}, }, expect_errors=True, ) @@ -271,8 +271,8 @@ def test_uploadfile_error_if_non_string_file_content(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": 'somefile.txt', "content": 1, "content_type": "image/jpeg"}, + 'path': '/some/folder/structure', + 'file': {'filename': 'somefile.txt', 'content': 1, 'content_type': 'image/jpeg'}, }, expect_errors=True, ) @@ -285,8 +285,8 @@ def test_uploadfile_error_if_no_proper_base64_encoding(app, setup): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": 'somefile.txt', "content": "1", "content_type": "image/jpeg"}, + 'path': '/some/folder/structure', + 'file': {'filename': 'somefile.txt', 'content': '1', 'content_type': 'image/jpeg'}, }, expect_errors=True, ) @@ -299,7 +299,7 @@ def test_uploadfile_cmis_gateway_error(app, setup, monkeypatch): from passerelle.utils.jsonresponse import APIError cmis_gateway = Mock() - cmis_gateway.create_doc.side_effect = APIError("some error") + cmis_gateway.create_doc.side_effect = APIError('some error') cmis_gateway_cls = Mock(return_value=cmis_gateway) import passerelle.apps.cmis.models @@ -307,12 +307,12 @@ def test_uploadfile_cmis_gateway_error(app, setup, monkeypatch): response = app.post_json( '/cmis/slug-cmis/uploadfile', params={ - "path": "/some/folder/structure", - "file": {"filename": "file_name", "content": b64encode('aaaa'), "content_type": "image/jpeg"}, + 'path': '/some/folder/structure', + 'file': {'filename': 'file_name', 'content': b64encode('aaaa'), 'content_type': 'image/jpeg'}, }, ) assert response.json['err'] == 1 - assert response.json['err_desc'].startswith("some error") + assert response.json['err_desc'].startswith('some error') def 
test_get_or_create_folder_already_existing(monkeypatch): @@ -373,7 +373,7 @@ def test_get_or_create_folder_with_some_existing_and_some_not(monkeypatch): elif path == '/whatever/man': raise ObjectNotFoundException() else: - raise Exception("I should not be called with: %s" % path) + raise Exception('I should not be called with: %s' % path) root_folder = Mock() default_repository = Mock(rootFolder=root_folder) @@ -404,12 +404,12 @@ def test_create_doc(): @pytest.mark.parametrize( - "cmis_exc,err_msg", + 'cmis_exc,err_msg', [ - (PermissionDeniedException, "permission denied"), - (UpdateConflictException, "update conflict"), - (InvalidArgumentException, "invalid property"), - (CmisException, "cmis binding error"), + (PermissionDeniedException, 'permission denied'), + (UpdateConflictException, 'update conflict'), + (InvalidArgumentException, 'invalid property'), + (CmisException, 'cmis binding error'), ], ) def test_wrap_cmis_error(app, setup, monkeypatch, cmis_exc, err_msg): @@ -418,7 +418,7 @@ def test_wrap_cmis_error(app, setup, monkeypatch, cmis_exc, err_msg): @wrap_cmis_error def dummy_func(): - raise cmis_exc("some error") + raise cmis_exc('some error') with pytest.raises(APIError) as excinfo: dummy_func() @@ -510,11 +510,11 @@ def test_cmis_types_view(setup, app, admin_user, monkeypatch): assert all(child.id in resp.text for child in root_type1.children) resp = resp.click(children[0].id) - assert "No more children." in resp.text + assert 'No more children.' in resp.text - resp = resp.click("Back to base types list") + resp = resp.click('Back to base types list') resp = resp.click(root_type2.id) - assert "No more children." in resp.text + assert 'No more children.' in resp.text resp = app.get('/manage/cmis/slug-cmis/type?id=wrong', status=404) @@ -527,9 +527,9 @@ def test_raw_uploadfile(app, setup, debug, caplog): file:='{"filename": "test2", "content": "c2FsdXQK"}' path=/test-eo """ caplog.set_level('DEBUG') - file_name = "test2" + file_name = 'test2' file_content = 'salut\n' - path = "/test-eo" + path = '/test-eo' url = reverse( 'generic-endpoint', kwargs={'connector': 'cmis', 'endpoint': 'uploadfile', 'slug': setup.slug} ) @@ -552,13 +552,13 @@ def test_raw_uploadfile(app, setup, debug, caplog): responses.add(responses.POST, 'http://example.com/cmisatom/test/children', body=cmis3_body, status=200) params = { - "path": path, - "file": {"filename": file_name, "content": b64encode(file_content), "content_type": "image/jpeg"}, + 'path': path, + 'file': {'filename': file_name, 'content': b64encode(file_content), 'content_type': 'image/jpeg'}, } response = app.post_json(url, params=params) json_result = response.json assert json_result['err'] == 0 - assert json_result['data']['properties']['cmis:objectTypeId'] == "cmis:document" + assert json_result['data']['properties']['cmis:objectTypeId'] == 'cmis:document' assert json_result['data']['properties']['cmis:name'] == file_name assert not any('cmislib' in record.name for record in caplog.records) diff --git a/tests/test_cryptor.py b/tests/test_cryptor.py index 69d678c9..79603746 100644 --- a/tests/test_cryptor.py +++ b/tests/test_cryptor.py @@ -78,13 +78,13 @@ def test_cryptor_restricted_access(app, cryptor): endpoint = tests.utils.generic_endpoint_url('cryptor', 'file-encrypt', slug=cryptor.slug) assert endpoint == '/cryptor/test/file-encrypt' resp = app.get(endpoint, status=405) - resp = app.post_json(endpoint, params={"foo": "bar"}, status=403) + resp = app.post_json(endpoint, params={'foo': 'bar'}, status=403) assert resp.json['err'] == 1 
assert 'PermissionDenied' in resp.json['err_class'] endpoint = tests.utils.generic_endpoint_url('cryptor', 'file-decrypt', slug=cryptor.slug) + '/uuid' assert endpoint == '/cryptor/test/file-decrypt/uuid' - resp = app.post_json(endpoint, params={"foo": "bar"}, status=405) + resp = app.post_json(endpoint, params={'foo': 'bar'}, status=405) resp = app.get(endpoint, status=403) assert resp.json['err'] == 1 assert 'PermissionDenied' in resp.json['err_class'] @@ -104,11 +104,11 @@ def test_cryptor_bad_requests(app, cryptor): endpoint = tests.utils.generic_endpoint_url('cryptor', 'file-encrypt', slug=cryptor.slug) for bad_payload in ( 'error', - {"foo": "bar"}, - ["not", "a", "dict"], - {"file": {"filename": "f", "content_type": "ct"}}, - {"file": {"filename": "f", "content_type": "ct", "content": None}}, - {"file": {"filename": "f", "content_type": "ct", "content": "NotBase64"}}, + {'foo': 'bar'}, + ['not', 'a', 'dict'], + {'file': {'filename': 'f', 'content_type': 'ct'}}, + {'file': {'filename': 'f', 'content_type': 'ct', 'content': None}}, + {'file': {'filename': 'f', 'content_type': 'ct', 'content': 'NotBase64'}}, ): resp = app.post_json(endpoint, params=bad_payload, status=400) assert resp.json['err'] == 1 @@ -131,7 +131,7 @@ def test_cryptor_encrypt_decrypt(app, cryptor): # encrypt endpoint = tests.utils.generic_endpoint_url('cryptor', 'file-encrypt', slug=cryptor.slug) content = force_str(base64.b64encode(b'this is foo and bar')) - payload = {"file": {"filename": "foo.txt", "content_type": "text/plain", "content": content}} + payload = {'file': {'filename': 'foo.txt', 'content_type': 'text/plain', 'content': content}} resp = app.post_json(endpoint, params=payload, status=200) assert resp.json['err'] == 0 diff --git a/tests/test_csv_datasource.py b/tests/test_csv_datasource.py index ad84f608..e0f49ea5 100644 --- a/tests/test_csv_datasource.py +++ b/tests/test_csv_datasource.py @@ -889,7 +889,7 @@ def test_update(admin_user, app, setup): resp = app.put(url, params=body, headers=headers, status=403) assert resp.json['err'] assert 'PermissionDenied' in resp.json['err_class'] - assert resp.json['err_class'] == "django.core.exceptions.PermissionDenied" + assert resp.json['err_class'] == 'django.core.exceptions.PermissionDenied' # add can_update_file access api = ApiUser.objects.get() diff --git a/tests/test_dpark.py b/tests/test_dpark.py index 3764ab36..a3f9aeba 100644 --- a/tests/test_dpark.py +++ b/tests/test_dpark.py @@ -273,32 +273,32 @@ def test_subscriber_infos(dpark, app, get_service): replydata = { 'CodeRetour': '01', 'MessageRetour': 'Dossier existant', - "Adresse_BoitePostaleLieuDit": None, - "Adresse_CodePostal": "44000", - "Adresse_CodeSTI": "315553609651", - "Adresse_EtageEscalierAppartement": None, - "Adresse_Extension": 1, - "Adresse_ImmeubleBatimentResidence": None, - "Adresse_Localite": "Nantes", - "Adresse_NomVoie": "All\u00e9es Jean Jaur\u00e8s", - "Adresse_NumeroVoie": 80, - "Adresse_Quartier": "PERI", - "Demande_DateDebutAbo": "20180625", - "Demande_DateFinAbo": "20190624", - "Demande_DelaiAutorise": 30, - "Demande_ImmatVehicule1": "CX453AD", - "Demande_ImmatVehicule2": None, - "Demande_MarqueVehicule1": "CITROEN", - "Demande_MarqueVehicule2": None, - "Demande_ModeleVehicule1": "GS", - "Demande_ModeleVehicule2": None, - "Demande_NumeroDossier": 22952, - "Demandeur_Civilite": 1, - "Demandeur_Email": "spameggs@example.net", - "Demandeur_NomUsuel": "BAR", - "Demandeur_Prenom": "Foo Spam", - "Demandeur_TelephoneFixe": "0611111111", - "Demandeur_TelephonePortable": None, + 
'Adresse_BoitePostaleLieuDit': None, + 'Adresse_CodePostal': '44000', + 'Adresse_CodeSTI': '315553609651', + 'Adresse_EtageEscalierAppartement': None, + 'Adresse_Extension': 1, + 'Adresse_ImmeubleBatimentResidence': None, + 'Adresse_Localite': 'Nantes', + 'Adresse_NomVoie': 'All\u00e9es Jean Jaur\u00e8s', + 'Adresse_NumeroVoie': 80, + 'Adresse_Quartier': 'PERI', + 'Demande_DateDebutAbo': '20180625', + 'Demande_DateFinAbo': '20190624', + 'Demande_DelaiAutorise': 30, + 'Demande_ImmatVehicule1': 'CX453AD', + 'Demande_ImmatVehicule2': None, + 'Demande_MarqueVehicule1': 'CITROEN', + 'Demande_MarqueVehicule2': None, + 'Demande_ModeleVehicule1': 'GS', + 'Demande_ModeleVehicule2': None, + 'Demande_NumeroDossier': 22952, + 'Demandeur_Civilite': 1, + 'Demandeur_Email': 'spameggs@example.net', + 'Demandeur_NomUsuel': 'BAR', + 'Demandeur_Prenom': 'Foo Spam', + 'Demandeur_TelephoneFixe': '0611111111', + 'Demandeur_TelephonePortable': None, } get_service.return_value = MockedService(replydata=replydata) resp = app.get(url) @@ -327,32 +327,32 @@ def test_subscriber_infos(dpark, app, get_service): replydata2 = { 'CodeRetour': '01', 'MessageRetour': 'Dossier existant', - "Adresse_BoitePostaleLieuDit": None, - "Adresse_CodePostal": "94000", - "Adresse_CodeSTI": "315553609651", - "Adresse_EtageEscalierAppartement": None, - "Adresse_Extension": 1, - "Adresse_ImmeubleBatimentResidence": None, - "Adresse_Localite": "Creteil", - "Adresse_NomVoie": "Allée les sablons", - "Adresse_NumeroVoie": 5, - "Adresse_Quartier": "HOOLI", - "Demande_DateDebutAbo": "20180430", - "Demande_DateFinAbo": None, - "Demande_DelaiAutorise": 30, - "Demande_ImmatVehicule1": "AA555BB", - "Demande_ImmatVehicule2": "XX333YY", - "Demande_MarqueVehicule1": "FORD", - "Demande_MarqueVehicule2": "MERCEDES", - "Demande_ModeleVehicule1": "Fiesta", - "Demande_ModeleVehicule2": "Serie A", - "Demande_NumeroDossier": 22955, - "Demandeur_Civilite": 1, - "Demandeur_Email": "spameggs@example.net", - "Demandeur_NomUsuel": "EGGS", - "Demandeur_Prenom": "Monty", - "Demandeur_TelephoneFixe": "0611111111", - "Demandeur_TelephonePortable": None, + 'Adresse_BoitePostaleLieuDit': None, + 'Adresse_CodePostal': '94000', + 'Adresse_CodeSTI': '315553609651', + 'Adresse_EtageEscalierAppartement': None, + 'Adresse_Extension': 1, + 'Adresse_ImmeubleBatimentResidence': None, + 'Adresse_Localite': 'Creteil', + 'Adresse_NomVoie': 'Allée les sablons', + 'Adresse_NumeroVoie': 5, + 'Adresse_Quartier': 'HOOLI', + 'Demande_DateDebutAbo': '20180430', + 'Demande_DateFinAbo': None, + 'Demande_DelaiAutorise': 30, + 'Demande_ImmatVehicule1': 'AA555BB', + 'Demande_ImmatVehicule2': 'XX333YY', + 'Demande_MarqueVehicule1': 'FORD', + 'Demande_MarqueVehicule2': 'MERCEDES', + 'Demande_ModeleVehicule1': 'Fiesta', + 'Demande_ModeleVehicule2': 'Serie A', + 'Demande_NumeroDossier': 22955, + 'Demandeur_Civilite': 1, + 'Demandeur_Email': 'spameggs@example.net', + 'Demandeur_NomUsuel': 'EGGS', + 'Demandeur_Prenom': 'Monty', + 'Demandeur_TelephoneFixe': '0611111111', + 'Demandeur_TelephonePortable': None, } # there will be only one call as first pairing is now cached get_service.side_effect = [MockedService(replydata=replydata2)] @@ -584,32 +584,32 @@ def test_registration(dpark, app, application_thirdparty_subscription, soap_mock url = '/dpark/test/register/' params = { - "address_complement1": "", - "address_complement2": "", - "address_locality": "Toulouse", - "address_sticode": "315553609651", - "address_streetext": "1", - "address_streetname": "", - "address_streetno": "16", - 
"address_zipcode": "31000", - "applicant_email": "sh@eggs.org", - "applicant_firstnames": "Spam Ham", - "applicant_lastname": "EGGS", - "applicant_mobilephone": "0655443322", - "applicant_phone": "", - "applicant_title": "1", - "application_car1_brand": "Peugeot", - "application_car1_exemption": "8", - "application_car1_model": "206", - "application_car1_plate": "AA777ZZ", - "application_id": "12-4", - "application_payment_type": "10", - "application_thirdparty_subscription": application_thirdparty_subscription, - "application_type": 1, + 'address_complement1': '', + 'address_complement2': '', + 'address_locality': 'Toulouse', + 'address_sticode': '315553609651', + 'address_streetext': '1', + 'address_streetname': '', + 'address_streetno': '16', + 'address_zipcode': '31000', + 'applicant_email': 'sh@eggs.org', + 'applicant_firstnames': 'Spam Ham', + 'applicant_lastname': 'EGGS', + 'applicant_mobilephone': '0655443322', + 'applicant_phone': '', + 'applicant_title': '1', + 'application_car1_brand': 'Peugeot', + 'application_car1_exemption': '8', + 'application_car1_model': '206', + 'application_car1_plate': 'AA777ZZ', + 'application_id': '12-4', + 'application_payment_type': '10', + 'application_thirdparty_subscription': application_thirdparty_subscription, + 'application_type': 1, } # with missing parameter app.post_json(url, params=params, status=400) - params['address_district'] = "PERI" + params['address_district'] = 'PERI' # with an incomplete application resp = app.post_json(url, params=params) @@ -675,10 +675,10 @@ def test_send_files(dpark, app, soap_mock, settings, freezer): params['impot_revenu'] = 'this is my tax notice' resp = app.post_json(url, params=params) errors = [ - " value is not a dict", - " is either absent or has an invalid value", - " is either absent or has an invalid value", - "Invalid document type: ", + ' value is not a dict', + ' is either absent or has an invalid value', + ' is either absent or has an invalid value', + 'Invalid document type: ', ] assert resp.json['err'] == 1 assert resp.json['err_desc'] == '%s' % errors @@ -729,29 +729,29 @@ def test_send_files(dpark, app, soap_mock, settings, freezer): def test_registration_double_plaque(dpark, app, soap_mock, application_thirdparty_subscription): url = '/dpark/test/register/' params = { - "address_complement1": "", - "address_complement2": "", - "address_district": "PERI", - "address_locality": "Toulouse", - "address_sticode": "315553609651", - "address_streetext": "1", - "address_streetname": "", - "address_streetno": "16", - "address_zipcode": "31000", - "applicant_email": "sh@eggs.org", - "applicant_firstnames": "Spam Ham", - "applicant_lastname": "EGGS", - "applicant_mobilephone": "0655443322", - "applicant_phone": "", - "applicant_title": "1", - "application_car1_brand": "Peugeot", - "application_car1_exemption": "8", - "application_car1_model": "206", - "application_car1_plate": "AA777ZZ", - "application_id": "12-4", - "application_payment_type": "10", - "application_thirdparty_subscription": application_thirdparty_subscription, - "application_type": 1, + 'address_complement1': '', + 'address_complement2': '', + 'address_district': 'PERI', + 'address_locality': 'Toulouse', + 'address_sticode': '315553609651', + 'address_streetext': '1', + 'address_streetname': '', + 'address_streetno': '16', + 'address_zipcode': '31000', + 'applicant_email': 'sh@eggs.org', + 'applicant_firstnames': 'Spam Ham', + 'applicant_lastname': 'EGGS', + 'applicant_mobilephone': '0655443322', + 'applicant_phone': '', + 
'applicant_title': '1', + 'application_car1_brand': 'Peugeot', + 'application_car1_exemption': '8', + 'application_car1_model': '206', + 'application_car1_plate': 'AA777ZZ', + 'application_id': '12-4', + 'application_payment_type': '10', + 'application_thirdparty_subscription': application_thirdparty_subscription, + 'application_type': 1, 'double_plaque': '1', } # with missing parameter diff --git a/tests/test_esabora.py b/tests/test_esabora.py index 07d6479b..6fdab2dc 100644 --- a/tests/test_esabora.py +++ b/tests/test_esabora.py @@ -102,7 +102,7 @@ def test_do_treatment(app, connector): 'content': base64.b64encode(file_content2).decode(), }, # empty field, will be skipped - 'PJ_Documents/2': "", + 'PJ_Documents/2': '', # ensure we handle single documents as well 'PJ_Documents_Autre': { 'filename': 'test3.pdf', @@ -182,29 +182,29 @@ def test_do_search(app, connector): 'id': 'id1', 'text': 'Foo 1', 'internalid': 'id1', - "column_1": 'Foo 1', - "column_2": 'Foo 2', - "column_3": 'Foo 3', + 'column_1': 'Foo 1', + 'column_2': 'Foo 2', + 'column_3': 'Foo 3', }, { 'id': 'id2', 'text': 'Bar 1', 'internalid': 'id2', - "column_1": 'Bar 1', - "column_2": 'Bar 2', - "column_3": 'Bar 3', + 'column_1': 'Bar 1', + 'column_2': 'Bar 2', + 'column_3': 'Bar 3', }, ], 'meta': { 'searchId': '23568', 'nbResults': 2, 'columns_name': { - "column_1": 'Column 1', - "column_2": 'Column 2', - "column_3": 'Column 3', + 'column_1': 'Column 1', + 'column_2': 'Column 2', + 'column_3': 'Column 3', }, 'keys_name': { - "internalid": 'internal.id', + 'internalid': 'internal.id', }, }, } diff --git a/tests/test_esirius.py b/tests/test_esirius.py index 4175a644..bc285524 100644 --- a/tests/test_esirius.py +++ b/tests/test_esirius.py @@ -106,7 +106,7 @@ def test_token(connector): return httmock.response(200) with httmock.HTTMock(esirius_mock): - connector.request('an/uri/', method='get', params="somes") + connector.request('an/uri/', method='get', params='somes') @pytest.mark.parametrize('secret_key', ['yyy', '']) @@ -127,7 +127,7 @@ def test_pre_request(connector, secret_key): connector.secret_key = secret_key connector.save() with httmock.HTTMock(esirius_mock): - connector.request('an/uri/', method='get', params="somes") + connector.request('an/uri/', method='get', params='somes') @pytest.mark.parametrize( @@ -144,7 +144,7 @@ def test_post_request(connector, status_code, content, a_dict): with pytest.raises(APIError) as exc: with httmock.HTTMock(esirius_mock): - connector.request('an/uri/', params="somes") + connector.request('an/uri/', params='somes') assert exc.value.err assert exc.value.data['status_code'] == status_code @@ -293,8 +293,8 @@ def test_get_appointment_error(app, connector): assert resp.json['err_class'] == 'passerelle.utils.jsonresponse.APIError' assert resp.json['data']['status_code'] == 404 assert resp.json['data']['json_content'] == { - "code": "Not Found", - "message": "Le rendez-vous {0} n'existe pas", + 'code': 'Not Found', + 'message': "Le rendez-vous {0} n'existe pas", } diff --git a/tests/test_esirius_swi.py b/tests/test_esirius_swi.py index 3079437e..aae902b4 100644 --- a/tests/test_esirius_swi.py +++ b/tests/test_esirius_swi.py @@ -34,7 +34,7 @@ def get_xml_file(filename): def get_json_file(filename): - with open(os.path.join(TEST_BASE_DIR, "%s.json" % filename)) as fd: + with open(os.path.join(TEST_BASE_DIR, '%s.json' % filename)) as fd: return json.load(fd) diff --git a/tests/test_gdema.py b/tests/test_gdema.py index 99c031da..c38cb56c 100644 --- a/tests/test_gdema.py +++ b/tests/test_gdema.py 
@@ -61,17 +61,17 @@ CREATE_INPUT = { # from Publik system } CONVERTED_INPUT = { # to GDEMA webservice - "Typology": { - "Id": "21012", + 'Typology': { + 'Id': '21012', }, - "Description": "this is a test", - "Localization": {"Town": "482", "TownLabel": "STRASBOURG"}, - "Origin": "2", - "Priority": "2", - "ReceptDate": "/Date(1165964400000+0100)/", - "Files": [ - {"Base64Stream": "dW4=", "Name": "test1.txt"}, - {"Base64Stream": "ZGV1eA==", "Name": "test2.txt"}, + 'Description': 'this is a test', + 'Localization': {'Town': '482', 'TownLabel': 'STRASBOURG'}, + 'Origin': '2', + 'Priority': '2', + 'ReceptDate': '/Date(1165964400000+0100)/', + 'Files': [ + {'Base64Stream': 'dW4=', 'Name': 'test1.txt'}, + {'Base64Stream': 'ZGV1eA==', 'Name': 'test2.txt'}, ], } diff --git a/tests/test_generic_endpoint.py b/tests/test_generic_endpoint.py index 02d17578..d2e282db 100644 --- a/tests/test_generic_endpoint.py +++ b/tests/test_generic_endpoint.py @@ -129,13 +129,13 @@ def test_proxy_logger(mocked_get, caplog, app, arcgis): assert record.levelno == 20 assert record.levelname == 'INFO' assert record.name == 'passerelle.resource.arcgis.test' - assert "endpoint GET /arcgis/test/mapservice-query?" in record.message + assert 'endpoint GET /arcgis/test/mapservice-query?' in record.message assert not hasattr(record, 'connector_result') record = caplog.records[1] assert record.levelno == 10 assert record.levelname == 'DEBUG' assert record.name == 'passerelle.resource.arcgis.test' - assert "endpoint GET /arcgis/test/mapservice-query?" in record.message + assert 'endpoint GET /arcgis/test/mapservice-query?' in record.message assert hasattr(record, 'connector_result') data = resp.json['data'] @@ -444,7 +444,7 @@ def test_endpoint_decorator_pre_process(db, app): with patch_init, patch_object: resp = app.post_json(url_foo, params=payload, status=400) assert resp.json['err'] == 1 - assert resp.json['err_desc'] == "foo/1/id: None is not of type %s" % repr('integer') + assert resp.json['err_desc'] == 'foo/1/id: None is not of type %s' % repr('integer') with patch_init, patch_object: resp = app.post_json(url_bar, params=payload) assert resp.json['err'] == 0 @@ -693,7 +693,7 @@ def test_endpoint_cookies(app, db, monkeypatch): @endpoint(methods=['get'], perm='OPEN') def httpcall(obj, request): with responses.RequestsMock() as rsps: - rsps.get('https://foo.invalid/set-cookie', json={}, headers={"set-cookie": "foo=bar;"}) + rsps.get('https://foo.invalid/set-cookie', json={}, headers={'set-cookie': 'foo=bar;'}) rsps.get( 'https://foo.invalid/get', json={}, diff --git a/tests/test_grandlyon_streetsections.py b/tests/test_grandlyon_streetsections.py index 2459c458..9aaa06bd 100644 --- a/tests/test_grandlyon_streetsections.py +++ b/tests/test_grandlyon_streetsections.py @@ -62,7 +62,7 @@ def test_partial_bornes(app, connector): ) response = app.get( - "/grandlyon-streetsections/gl-streetsections/section_info" + '/grandlyon-streetsections/gl-streetsections/section_info' "?streetname=Rue d'Essling&commune=Lyon&streetnumber=20" ) assert response.json['err'] == 0 @@ -272,7 +272,7 @@ def test_non_uppercase_communes(app, connector): with HTTMock(data_mock): connector.daily() response = app.get( - "/grandlyon-streetsections/gl-streetsections/section_info" + '/grandlyon-streetsections/gl-streetsections/section_info' "?streetname=Rue de l'Avenir&commune=Villeurbanne&streetnumber=8" ) assert response.json['err'] == 0 diff --git a/tests/test_greco.py b/tests/test_greco.py index 38ab2cef..df68ad87 100644 --- a/tests/test_greco.py +++ 
b/tests/test_greco.py @@ -39,12 +39,12 @@ WSDL_FILENAME = os.path.join(TEST_BASE_DIR, 'greco.wsdl') def get_xml_file(filename): - with open(os.path.join(TEST_BASE_DIR, "%s.xml" % filename), 'rb') as desc: + with open(os.path.join(TEST_BASE_DIR, '%s.xml' % filename), 'rb') as desc: return desc.read() def get_json_file(filename): - with open(os.path.join(TEST_BASE_DIR, "%s.json" % filename)) as desc: + with open(os.path.join(TEST_BASE_DIR, '%s.json' % filename)) as desc: return desc.read() @@ -679,9 +679,9 @@ def test_greco_add_information_ko(mocked_post, app, conn): resp = app.get(url, params={'idgreco': '538593', 'iddemande': 'MDFGDZRF', 'information': 'my info'}) assert not resp.json['err'] assert resp.json['data'] == { - "iddemande": "MDFGDZRF", - "idgreco": "538593", - "motifsrejet": "Le complément d\\'information ne peut être ajouté qu\\'à une demande ayant le statut \\'incomplète\\'\\r\\n", + 'iddemande': 'MDFGDZRF', + 'idgreco': '538593', + 'motifsrejet': "Le complément d\\'information ne peut être ajouté qu\\'à une demande ayant le statut \\'incomplète\\'\\r\\n", } diff --git a/tests/test_grenoble_gru.py b/tests/test_grenoble_gru.py index fc7c7c91..2752c279 100644 --- a/tests/test_grenoble_gru.py +++ b/tests/test_grenoble_gru.py @@ -33,24 +33,24 @@ def setup(db): BASIC_PAYLOAD = { - "application_id": "12-1", - "applicant_lastname": "Doe", - "applicant_firstname": "John", - "applicant_phone": "0102030405", - "applicant_email": "j.doe@example.org", - "applicant_required_reply": '0', - "applicant_contact_mode": '3', - "applicant_status": '8', - "intervention_address_type": '1', - "intervention_street_number": '168', - "intervention_street_name": "Du Chateau", - "intervention_address_insee": "38185", - "intervention_sector": '2', - "intervention_number_type": '5', - "intervention_datetime": "2011-02-07T11:34", - "urgent_demand": '1', - "dysfonction_type": '3', - "intervention_reason": '2', + 'application_id': '12-1', + 'applicant_lastname': 'Doe', + 'applicant_firstname': 'John', + 'applicant_phone': '0102030405', + 'applicant_email': 'j.doe@example.org', + 'applicant_required_reply': '0', + 'applicant_contact_mode': '3', + 'applicant_status': '8', + 'intervention_address_type': '1', + 'intervention_street_number': '168', + 'intervention_street_name': 'Du Chateau', + 'intervention_address_insee': '38185', + 'intervention_sector': '2', + 'intervention_number_type': '5', + 'intervention_datetime': '2011-02-07T11:34', + 'urgent_demand': '1', + 'dysfonction_type': '3', + 'intervention_reason': '2', 'dem_pav': 'déchetterie', 'comment_description': 'thank you 😘', } diff --git a/tests/test_isere_ens.py b/tests/test_isere_ens.py index 99c7653c..628235b5 100644 --- a/tests/test_isere_ens.py +++ b/tests/test_isere_ens.py @@ -27,7 +27,7 @@ from passerelle.contrib.isere_ens.models import API_VERSION, IsereENS @pytest.fixture def setup(db): return tests.utils.setup_access_rights( - IsereENS.objects.create(slug="test", base_url="https://ens38.example.net/", token="toktok") + IsereENS.objects.create(slug='test', base_url='https://ens38.example.net/', token='toktok') ) @@ -140,263 +140,263 @@ API_VERSIONS = [vers[1] for vers in API_VERSION] @pytest.mark.parametrize('api_version', API_VERSIONS) -@mock.patch("passerelle.utils.Request.get") +@mock.patch('passerelle.utils.Request.get') def test_get_sites(mocked_get, app, setup, api_version): setup.api_version = api_version setup.save() mocked_get.return_value = tests.utils.FakedResponse(content=SITES_RESPONSE, status_code=200) endpoint = reverse( - 
"generic-endpoint", - kwargs={"connector": "isere-ens", "slug": setup.slug, "endpoint": "sites"}, + 'generic-endpoint', + kwargs={'connector': 'isere-ens', 'slug': setup.slug, 'endpoint': 'sites'}, ) response = app.get(endpoint) - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/site") - assert mocked_get.call_args[1]["headers"]["token"] == "toktok" + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/site') + assert mocked_get.call_args[1]['headers']['token'] == 'toktok' assert mocked_get.call_count == 1 - assert "data" in response.json - assert response.json["err"] == 0 - for item in response.json["data"]: - assert "id" in item - assert "text" in item - assert "city" in item - assert "code" in item + assert 'data' in response.json + assert response.json['err'] == 0 + for item in response.json['data']: + assert 'id' in item + assert 'text' in item + assert 'city' in item + assert 'code' in item # test cache system response = app.get(endpoint) assert mocked_get.call_count == 1 - response = app.get(endpoint + "?q=etangs") - assert len(response.json["data"]) == 2 - response = app.get(endpoint + "?q=CourTe") - assert len(response.json["data"]) == 1 - response = app.get(endpoint + "?kind=social") - assert len(response.json["data"]) == 2 + response = app.get(endpoint + '?q=etangs') + assert len(response.json['data']) == 2 + response = app.get(endpoint + '?q=CourTe') + assert len(response.json['data']) == 1 + response = app.get(endpoint + '?kind=social') + assert len(response.json['data']) == 2 mocked_get.return_value = tests.utils.FakedResponse(content=SD29B_RESPONSE, status_code=200) - response = app.get(endpoint + "?id=SD29b") - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/site/SD29b") - assert len(response.json["data"]) == 1 - assert response.json["data"][0]["id"] == "SD29b" - assert response.json["data"][0]["dogs"] == "LEASH" + response = app.get(endpoint + '?id=SD29b') + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/site/SD29b') + assert len(response.json['data']) == 1 + assert response.json['data'][0]['id'] == 'SD29b' + assert response.json['data'][0]['dogs'] == 'LEASH' # bad response for ENS API mocked_get.return_value = tests.utils.FakedResponse(content=SITE_404_RESPONSE, status_code=404) - response = app.get(endpoint + "?id=SD29x") - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/site/SD29x") - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"].startswith("error status:404") - assert response.json["data"]["status_code"] == 404 - assert response.json["data"]["json_content"]["message"] == "Site not found with code SD29x" - mocked_get.return_value = tests.utils.FakedResponse(content="crash", status_code=500) - response = app.get(endpoint + "?id=foo500") - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/site/foo500") - assert response.json["err"] == 1 - assert response.json["err_desc"].startswith("error status:500") - assert response.json["err_class"].endswith("APIError") - assert response.json["data"]["status_code"] == 500 - assert response.json["data"]["json_content"] is None + response = app.get(endpoint + '?id=SD29x') + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/site/SD29x') + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'].startswith('error status:404') + assert 
response.json['data']['status_code'] == 404 + assert response.json['data']['json_content']['message'] == 'Site not found with code SD29x' + mocked_get.return_value = tests.utils.FakedResponse(content='crash', status_code=500) + response = app.get(endpoint + '?id=foo500') + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/site/foo500') + assert response.json['err'] == 1 + assert response.json['err_desc'].startswith('error status:500') + assert response.json['err_class'].endswith('APIError') + assert response.json['data']['status_code'] == 500 + assert response.json['data']['json_content'] is None mocked_get.return_value = tests.utils.FakedResponse(content=None, status_code=204) - response = app.get(endpoint + "?id=foo204") - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/site/foo204") - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "abnormal empty response" - mocked_get.return_value = tests.utils.FakedResponse(content="not json", status_code=200) - response = app.get(endpoint + "?id=foo") - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/site/foo") - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"].startswith("invalid JSON in response:") + response = app.get(endpoint + '?id=foo204') + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/site/foo204') + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'abnormal empty response' + mocked_get.return_value = tests.utils.FakedResponse(content='not json', status_code=200) + response = app.get(endpoint + '?id=foo') + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/site/foo') + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'].startswith('invalid JSON in response:') @pytest.mark.parametrize('api_version', API_VERSIONS) -@mock.patch("passerelle.utils.Request.get") +@mock.patch('passerelle.utils.Request.get') def test_get_animators(mocked_get, app, setup, api_version): setup.api_version = api_version setup.save() mocked_get.return_value = tests.utils.FakedResponse(content=ANIMATORS_RESPONSE, status_code=200) endpoint = reverse( - "generic-endpoint", - kwargs={"connector": "isere-ens", "slug": setup.slug, "endpoint": "animators"}, + 'generic-endpoint', + kwargs={'connector': 'isere-ens', 'slug': setup.slug, 'endpoint': 'animators'}, ) response = app.get(endpoint) - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/schoolAnimator") + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/schoolAnimator') assert mocked_get.call_count == 1 - assert "data" in response.json - assert response.json["err"] == 0 - for item in response.json["data"]: - assert "id" in item - assert "text" in item - assert "first_name" in item - assert "email" in item + assert 'data' in response.json + assert response.json['err'] == 0 + for item in response.json['data']: + assert 'id' in item + assert 'text' in item + assert 'first_name' in item + assert 'email' in item # test cache system response = app.get(endpoint) assert mocked_get.call_count == 1 - response = app.get(endpoint + "?q=Kael") - assert len(response.json["data"]) == 1 - response = app.get(endpoint + "?q=association") - assert len(response.json["data"]) == 2 - response = app.get(endpoint 
+ "?q=mail.grd") - assert len(response.json["data"]) == 2 - response = app.get(endpoint + "?id=2") - assert len(response.json["data"]) == 1 - assert response.json["data"][0]["first_name"] == "Michael" + response = app.get(endpoint + '?q=Kael') + assert len(response.json['data']) == 1 + response = app.get(endpoint + '?q=association') + assert len(response.json['data']) == 2 + response = app.get(endpoint + '?q=mail.grd') + assert len(response.json['data']) == 2 + response = app.get(endpoint + '?id=2') + assert len(response.json['data']) == 1 + assert response.json['data'][0]['first_name'] == 'Michael' @pytest.mark.parametrize('api_version', API_VERSIONS) -@mock.patch("passerelle.utils.Request.get") +@mock.patch('passerelle.utils.Request.get') def test_get_site_calendar(mocked_get, app, setup, freezer, api_version): setup.api_version = api_version setup.save() - freezer.move_to("2021-01-21 12:00:00") + freezer.move_to('2021-01-21 12:00:00') mocked_get.return_value = tests.utils.FakedResponse(content=SITE_CALENDAR_RESPONSE, status_code=200) endpoint = reverse( - "generic-endpoint", + 'generic-endpoint', kwargs={ - "connector": "isere-ens", - "slug": setup.slug, - "endpoint": "site-calendar", + 'connector': 'isere-ens', + 'slug': setup.slug, + 'endpoint': 'site-calendar', }, ) - response = app.get(endpoint + "?site=SD29b") - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/site/SD29b/calendar") - assert mocked_get.call_args[1]["params"]["start_date"] == "2021-01-21" - assert mocked_get.call_args[1]["params"]["end_date"] == "2021-04-23" - assert response.json["err"] == 0 - assert len(response.json["data"]) == 3 + response = app.get(endpoint + '?site=SD29b') + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/site/SD29b/calendar') + assert mocked_get.call_args[1]['params']['start_date'] == '2021-01-21' + assert mocked_get.call_args[1]['params']['end_date'] == '2021-04-23' + assert response.json['err'] == 0 + assert len(response.json['data']) == 3 - assert response.json["data"][0]["disabled"] is False - assert response.json["data"][1]["disabled"] is False - assert response.json["data"][2]["disabled"] is True - assert response.json["data"][0]["status"] == "open" - assert response.json["data"][1]["status"] == "partially-open" - assert response.json["data"][2]["status"] == "closed" - assert response.json["data"][0]["details"] == "Morning (available), Lunch (closed), Afternoon (available)" - assert response.json["data"][1]["details"] == "Morning (available), Lunch (open), Afternoon (complete)" - assert response.json["data"][2]["details"] == "Morning (complete), Lunch (closed), Afternoon (complete)" + assert response.json['data'][0]['disabled'] is False + assert response.json['data'][1]['disabled'] is False + assert response.json['data'][2]['disabled'] is True + assert response.json['data'][0]['status'] == 'open' + assert response.json['data'][1]['status'] == 'partially-open' + assert response.json['data'][2]['status'] == 'closed' + assert response.json['data'][0]['details'] == 'Morning (available), Lunch (closed), Afternoon (available)' + assert response.json['data'][1]['details'] == 'Morning (available), Lunch (open), Afternoon (complete)' + assert response.json['data'][2]['details'] == 'Morning (complete), Lunch (closed), Afternoon (complete)' # "2020-01-21" - assert response.json["data"][0]["date_number"] == "21" - assert response.json["data"][0]["date_weekday"] == "Tuesday" - assert response.json["data"][0]["date_weekdayindex"] == "2" - assert 
response.json["data"][0]["date_weeknumber"] == "4" - assert response.json["data"][0]["date_month"] == "January 2020" + assert response.json['data'][0]['date_number'] == '21' + assert response.json['data'][0]['date_weekday'] == 'Tuesday' + assert response.json['data'][0]['date_weekdayindex'] == '2' + assert response.json['data'][0]['date_weeknumber'] == '4' + assert response.json['data'][0]['date_month'] == 'January 2020' - response = app.get(endpoint + "?site=SD29b&start_date=2021-01-22") - assert mocked_get.call_args[1]["params"]["start_date"] == "2021-01-22" - assert mocked_get.call_args[1]["params"]["end_date"] == "2021-04-24" - assert response.json["err"] == 0 - response = app.get(endpoint + "?site=SD29b&start_date=2021-01-22&end_date=2021-01-30") - assert mocked_get.call_args[1]["params"]["start_date"] == "2021-01-22" - assert mocked_get.call_args[1]["params"]["end_date"] == "2021-01-30" - assert response.json["err"] == 0 - response = app.get(endpoint + "?site=SD29b&start_date=foo", status=400) - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "bad start_date format (foo), should be YYYY-MM-DD" - response = app.get(endpoint + "?site=SD29b&end_date=bar", status=400) - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "bad end_date format (bar), should be YYYY-MM-DD" + response = app.get(endpoint + '?site=SD29b&start_date=2021-01-22') + assert mocked_get.call_args[1]['params']['start_date'] == '2021-01-22' + assert mocked_get.call_args[1]['params']['end_date'] == '2021-04-24' + assert response.json['err'] == 0 + response = app.get(endpoint + '?site=SD29b&start_date=2021-01-22&end_date=2021-01-30') + assert mocked_get.call_args[1]['params']['start_date'] == '2021-01-22' + assert mocked_get.call_args[1]['params']['end_date'] == '2021-01-30' + assert response.json['err'] == 0 + response = app.get(endpoint + '?site=SD29b&start_date=foo', status=400) + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'bad start_date format (foo), should be YYYY-MM-DD' + response = app.get(endpoint + '?site=SD29b&end_date=bar', status=400) + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'bad end_date format (bar), should be YYYY-MM-DD' -@mock.patch("passerelle.utils.Request.post") +@mock.patch('passerelle.utils.Request.post') def test_post_book_v1(mocked_post, app, setup): mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE, status_code=200) endpoint = reverse( - "generic-endpoint", + 'generic-endpoint', kwargs={ - "connector": "isere-ens", - "slug": setup.slug, - "endpoint": "site-booking", + 'connector': 'isere-ens', + 'slug': setup.slug, + 'endpoint': 'site-booking', }, ) book = { - "code": "resa", - "status": "OK", - "beneficiary_id": "42", - "beneficiary_first_name": "Foo", - "beneficiary_last_name": "Bar", - "beneficiary_email": "foobar@example.net", - "beneficiary_phone": "9876", - "beneficiary_cellphone": "06", - "entity_id": "38420D", - "entity_name": "Ecole FooBar", - "entity_type": "school", - "project": "Publik", - "site": "SD29b", - "applicant": "app", - "public": "GS", - "date": "2020-01-22", - "participants": "50", - "morning": True, - "lunch": False, - "afternoon": False, - "pmr": True, - "grade_levels": ["CP", "CE1"], - "animator": "42", + 'code': 'resa', + 
'status': 'OK', + 'beneficiary_id': '42', + 'beneficiary_first_name': 'Foo', + 'beneficiary_last_name': 'Bar', + 'beneficiary_email': 'foobar@example.net', + 'beneficiary_phone': '9876', + 'beneficiary_cellphone': '06', + 'entity_id': '38420D', + 'entity_name': 'Ecole FooBar', + 'entity_type': 'school', + 'project': 'Publik', + 'site': 'SD29b', + 'applicant': 'app', + 'public': 'GS', + 'date': '2020-01-22', + 'participants': '50', + 'morning': True, + 'lunch': False, + 'afternoon': False, + 'pmr': True, + 'grade_levels': ['CP', 'CE1'], + 'animator': '42', } response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") + assert mocked_post.call_args[0][0].endswith('api/1.0.0/booking') assert mocked_post.call_count == 1 assert mocked_post.call_args.kwargs['json']['booking']['schoolAnimator'] == 42 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "BOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'BOOKING' mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE_OVERBOOKING, status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") + assert mocked_post.call_args[0][0].endswith('api/1.0.0/booking') assert mocked_post.call_count == 2 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "OVERBOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'OVERBOOKING' mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE_REFUSED, status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") + assert mocked_post.call_args[0][0].endswith('api/1.0.0/booking') assert mocked_post.call_count == 3 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "booking status is REFUSED" - assert response.json["data"]["status"] == "REFUSED" + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'booking status is REFUSED' + assert response.json['data']['status'] == 'REFUSED' mocked_post.return_value = tests.utils.FakedResponse(content="""["not", "a", "dict"]""", status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") + assert mocked_post.call_args[0][0].endswith('api/1.0.0/booking') assert mocked_post.call_count == 4 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "response is not a dict" - assert response.json["data"] == ["not", "a", "dict"] + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'response is not a dict' + assert response.json['data'] == ['not', 'a', 'dict'] mocked_post.return_value = tests.utils.FakedResponse(content="""{"foo": "bar"}""", status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") + assert mocked_post.call_args[0][0].endswith('api/1.0.0/booking') assert mocked_post.call_count == 5 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "no status in response" - assert response.json["data"] == {"foo": "bar"} + 
assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'no status in response' + assert response.json['data'] == {'foo': 'bar'} - book["animator"] = "" + book['animator'] = '' mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE, status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") + assert mocked_post.call_args[0][0].endswith('api/1.0.0/booking') assert 'schoolAnimator' not in mocked_post.call_args.kwargs['json']['booking'] assert mocked_post.call_count == 6 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "BOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'BOOKING' - del book["animator"] + del book['animator'] mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE, status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/1.0.0/booking") + assert mocked_post.call_args[0][0].endswith('api/1.0.0/booking') assert 'schoolAnimator' not in mocked_post.call_args.kwargs['json']['booking'] assert mocked_post.call_count == 7 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "BOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'BOOKING' del book['code'] response = app.post_json(endpoint, params=book, status=400) @@ -406,37 +406,37 @@ def test_post_book_v1(mocked_post, app, setup): @pytest.mark.parametrize('api_version', API_VERSIONS[1:]) -@mock.patch("passerelle.utils.Request.post") +@mock.patch('passerelle.utils.Request.post') def test_post_book(mocked_post, app, setup, api_version): setup.api_version = api_version setup.save() mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE, status_code=200) endpoint = reverse( - "generic-endpoint", + 'generic-endpoint', kwargs={ - "connector": "isere-ens", - "slug": setup.slug, - "endpoint": "site-booking", + 'connector': 'isere-ens', + 'slug': setup.slug, + 'endpoint': 'site-booking', }, ) book = { - "site": "SD29b", - "date": "2020-01-22", - "pmr": True, - "morning": True, - "lunch": False, - "afternoon": False, - "participants": "50", - "animator": "42", - "group": "3", - "grade_levels": ["CP", "CE1"], - "beneficiary_first_name": "Foo", - "beneficiary_last_name": "Bar", - "beneficiary_email": "foobar@example.net", - "beneficiary_phone": "9876", + 'site': 'SD29b', + 'date': '2020-01-22', + 'pmr': True, + 'morning': True, + 'lunch': False, + 'afternoon': False, + 'participants': '50', + 'animator': '42', + 'group': '3', + 'grade_levels': ['CP', 'CE1'], + 'beneficiary_first_name': 'Foo', + 'beneficiary_last_name': 'Bar', + 'beneficiary_email': 'foobar@example.net', + 'beneficiary_phone': '9876', } response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert mocked_post.call_count == 1 assert mocked_post.call_args.kwargs['json']['schoolAnimator'] == 42 assert mocked_post.call_args.kwargs['json']['participants'] == 50 @@ -444,69 +444,69 @@ def test_post_book(mocked_post, app, setup, api_version): assert mocked_post.call_args.kwargs['json']['beneficiary']['cellphone'] == '' assert 'idExternal' not in mocked_post.call_args.kwargs['json'] assert 'projectCode' not in 
mocked_post.call_args.kwargs['json'] - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "BOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'BOOKING' book['external_id'] = '12-34' book['project'] = 'pc' response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert mocked_post.call_count == 2 assert mocked_post.call_args.kwargs['json']['idExternal'] == '12-34' assert mocked_post.call_args.kwargs['json']['projectCode'] == 'pc' - assert response.json["err"] == 0 + assert response.json['err'] == 0 mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE_OVERBOOKING, status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert mocked_post.call_count == 3 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "OVERBOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'OVERBOOKING' mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE_REFUSED, status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert mocked_post.call_count == 4 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "booking status is REFUSED" - assert response.json["data"]["status"] == "REFUSED" + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'booking status is REFUSED' + assert response.json['data']['status'] == 'REFUSED' mocked_post.return_value = tests.utils.FakedResponse(content="""["not", "a", "dict"]""", status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert mocked_post.call_count == 5 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "response is not a dict" - assert response.json["data"] == ["not", "a", "dict"] + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'response is not a dict' + assert response.json['data'] == ['not', 'a', 'dict'] mocked_post.return_value = tests.utils.FakedResponse(content="""{"foo": "bar"}""", status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert mocked_post.call_count == 6 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "no status in response" - assert response.json["data"] == {"foo": "bar"} + assert response.json['err'] == 1 + assert 
response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'no status in response' + assert response.json['data'] == {'foo': 'bar'} - book["animator"] = "" + book['animator'] = '' mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE, status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert 'schoolAnimator' not in mocked_post.call_args.kwargs['json'] assert mocked_post.call_count == 7 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "BOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'BOOKING' - del book["animator"] + del book['animator'] mocked_post.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE, status_code=200) response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert 'schoolAnimator' not in mocked_post.call_args.kwargs['json'] assert mocked_post.call_count == 8 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "BOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'BOOKING' book['group'] = 'G' response = app.post_json(endpoint, params=book, status=400) @@ -528,95 +528,95 @@ def test_post_book(mocked_post, app, setup, api_version): book['applicant'] = 'ecole 1' response = app.post_json(endpoint, params=book) - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school') assert mocked_post.call_args.kwargs['json']['schoolGroup'] is None assert mocked_post.call_args.kwargs['json']['applicant'] == 'ecole 1' @pytest.mark.parametrize('api_version', API_VERSIONS) -@mock.patch("passerelle.utils.Request.get") +@mock.patch('passerelle.utils.Request.get') def test_get_booking_status(mocked_get, app, setup, api_version): setup.api_version = api_version setup.save() endpoint = reverse( - "generic-endpoint", + 'generic-endpoint', kwargs={ - "connector": "isere-ens", - "slug": setup.slug, - "endpoint": "get-site-booking", + 'connector': 'isere-ens', + 'slug': setup.slug, + 'endpoint': 'get-site-booking', }, ) mocked_get.return_value = tests.utils.FakedResponse(content=BOOK_RESPONSE, status_code=200) - response = app.get(endpoint + "?code=123") + response = app.get(endpoint + '?code=123') if api_version == '1.0.0': - assert mocked_get.call_args[0][0].endswith("api/1.0.0/booking/123/status") + assert mocked_get.call_args[0][0].endswith('api/1.0.0/booking/123/status') else: - assert mocked_get.call_args[0][0].endswith("api/" + api_version + "/site/booking/school/123/status/") + assert mocked_get.call_args[0][0].endswith('api/' + api_version + '/site/booking/school/123/status/') assert mocked_get.call_count == 1 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "BOOKING" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'BOOKING' response = app.get(endpoint, status=400) # no code specified assert mocked_get.call_count == 1 mocked_get.return_value = tests.utils.FakedResponse(content="""["not", "a", "dict"]""", status_code=200) - response = app.get(endpoint + 
"?code=123") + response = app.get(endpoint + '?code=123') assert mocked_get.call_count == 2 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "response is not a dict" - assert response.json["data"] == ["not", "a", "dict"] + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'response is not a dict' + assert response.json['data'] == ['not', 'a', 'dict'] mocked_get.return_value = tests.utils.FakedResponse(content="""{"foo": "bar"}""", status_code=200) - response = app.get(endpoint + "?code=123") + response = app.get(endpoint + '?code=123') assert mocked_get.call_count == 3 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "no status in response" - assert response.json["data"] == {"foo": "bar"} + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'no status in response' + assert response.json['data'] == {'foo': 'bar'} @pytest.mark.parametrize('api_version', API_VERSIONS) -@mock.patch("passerelle.utils.Request.post") +@mock.patch('passerelle.utils.Request.post') def test_cancel_booking(mocked_post, app, setup, api_version): setup.api_version = api_version setup.save() endpoint = reverse( - "generic-endpoint", + 'generic-endpoint', kwargs={ - "connector": "isere-ens", - "slug": setup.slug, - "endpoint": "cancel-site-booking", + 'connector': 'isere-ens', + 'slug': setup.slug, + 'endpoint': 'cancel-site-booking', }, ) mocked_post.return_value = tests.utils.FakedResponse(content=CANCEL_RESPONSE, status_code=200) - response = app.post(endpoint + "?code=123") + response = app.post(endpoint + '?code=123') if api_version == '1.0.0': - assert response.json["err"] == 1 - assert response.json["data"] is None - assert response.json["err_desc"] == "not available on API v1.0.0" + assert response.json['err'] == 1 + assert response.json['data'] is None + assert response.json['err_desc'] == 'not available on API v1.0.0' return - assert mocked_post.call_args[0][0].endswith("api/" + api_version + "/site/booking/school/cancel/123") + assert mocked_post.call_args[0][0].endswith('api/' + api_version + '/site/booking/school/cancel/123') assert mocked_post.call_count == 1 - assert response.json["err"] == 0 - assert response.json["data"]["status"] == "CANCELED" + assert response.json['err'] == 0 + assert response.json['data']['status'] == 'CANCELED' response = app.post(endpoint, status=400) # no code specified assert mocked_post.call_count == 1 # same as before mocked_post.return_value = tests.utils.FakedResponse(content="""["not", "a", "dict"]""", status_code=200) - response = app.post(endpoint + "?code=123") + response = app.post(endpoint + '?code=123') assert mocked_post.call_count == 2 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "response is not a dict" - assert response.json["data"] == ["not", "a", "dict"] + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'response is not a dict' + assert response.json['data'] == ['not', 'a', 'dict'] mocked_post.return_value = tests.utils.FakedResponse(content="""{"foo": "bar"}""", status_code=200) - response = app.post(endpoint + "?code=123") + response = app.post(endpoint + '?code=123') assert 
mocked_post.call_count == 3 - assert response.json["err"] == 1 - assert response.json["err_class"].endswith("APIError") - assert response.json["err_desc"] == "no status in response" - assert response.json["data"] == {"foo": "bar"} + assert response.json['err'] == 1 + assert response.json['err_class'].endswith('APIError') + assert response.json['err_desc'] == 'no status in response' + assert response.json['data'] == {'foo': 'bar'} diff --git a/tests/test_iws.py b/tests/test_iws.py index 5b64073f..df97c854 100644 --- a/tests/test_iws.py +++ b/tests/test_iws.py @@ -27,12 +27,12 @@ def setup(db): def create_params(**kwargs): res = { - "firstname": "John", - "lastname": "Doe", - "description": "four : 1", - "date": "28/10/2018", - "token": "token", - "email_notif": True, + 'firstname': 'John', + 'lastname': 'Doe', + 'description': 'four : 1', + 'date': '28/10/2018', + 'token': 'token', + 'email_notif': True, } res.update(kwargs) return res @@ -129,8 +129,8 @@ def test_checkdate_iws_has_dates(app, setup, monkeypatch, settings, endpoint_dum assert json_result['err'] == 0 dates = json_result['data'] assert len(dates) == 2 - assert dates[0] == {"id": "18/06/2018", "text": "lundi 18 juin 2018", "token": "sometoken"} - assert dates[1] == {"id": "19/06/2018", "text": "mardi 19 juin 2018", "token": "sometoken"} + assert dates[0] == {'id': '18/06/2018', 'text': 'lundi 18 juin 2018', 'token': 'sometoken'} + assert dates[1] == {'id': '19/06/2018', 'text': 'mardi 19 juin 2018', 'token': 'sometoken'} soap_args = soap_call.call_args[0][0] assert soap_args['C_STAPPEL'] == 'B' @@ -349,7 +349,7 @@ def test_bookdate_tel_motif(app, setup, monkeypatch): }, ) params = create_params() - params['tel_number'] = "0101010101" + params['tel_number'] = '0101010101' response = app.post_json('/iws/slug-iws/bookdate/', params=params) soap_args = soap_call.call_args[0][0] assert soap_args['I_AP_TEL_DEMANDEU'] == '0101010101' @@ -369,7 +369,7 @@ def test_bookdate_sms_true(app, setup, monkeypatch, sms): }, ) params = create_params() - params['tel_number'] = "0101010101" + params['tel_number'] = '0101010101' params['sms'] = sms response = app.post_json('/iws/slug-iws/bookdate/', params=params) soap_args = soap_call.call_args[0][0] @@ -390,7 +390,7 @@ def test_bookdate_sms_false(app, setup, monkeypatch, sms): }, ) params = create_params() - params['tel_number'] = "0101010101" + params['tel_number'] = '0101010101' params['sms'] = sms response = app.post_json('/iws/slug-iws/bookdate/', params=params) soap_args = soap_call.call_args[0][0] diff --git a/tests/test_jsonresponse.py b/tests/test_jsonresponse.py index 26edc135..549c909f 100644 --- a/tests/test_jsonresponse.py +++ b/tests/test_jsonresponse.py @@ -58,7 +58,7 @@ def test_jsonresponselog_get(rf, caplog, settings): assert hasattr(record, 'method') if record.method == 'POST': assert hasattr(record, 'body') - assert "Error occurred while processing request" in record.message + assert 'Error occurred while processing request' in record.message caplog.clear() @@ -107,7 +107,7 @@ def test_jsonresponse_log_as_warning_exception(caplog): assert record.levelno == logging.WARNING assert hasattr(record, 'method') assert record.method == 'GET' - assert "Error occurred while processing request" in record.message + assert 'Error occurred while processing request' in record.message assert response.status_code == 488 data = json.loads(response.content) assert data['err'] == 'logaswarningexception' @@ -120,7 +120,7 @@ def test_jsonresponse_error_header(): @to_json() def test_func(req): - 
return {"test": "un test"} + return {'test': 'un test'} result = test_func(req) assert result.status_code == 200 @@ -155,7 +155,7 @@ def test_jsonresponse_with_callback(): @to_json() def test_func(req): - return {"foo": "bar"} + return {'foo': 'bar'} result = test_func(req) content_type = result.get('Content-Type') @@ -171,7 +171,7 @@ def test_jsonresponse_with_wrong_callback(): @to_json() def test_func(req): - return {"foo": "bar"} + return {'foo': 'bar'} result = test_func(req) assert result.status_code == 400 diff --git a/tests/test_lille_kimoce.py b/tests/test_lille_kimoce.py index aa249722..4b109650 100644 --- a/tests/test_lille_kimoce.py +++ b/tests/test_lille_kimoce.py @@ -194,7 +194,7 @@ def test_get_token(mocked_post, app, setup): mocked_post.return_value = tests.utils.FakedResponse(content=TOKEN_RESPONSE, status_code=200) setup.get_token() assert mocked_post.call_count == 2 - assert "api/login_check" in mocked_post.call_args[0][0] + assert 'api/login_check' in mocked_post.call_args[0][0] assert mocked_post.call_args[1]['json']['username'] == 'test' assert mocked_post.call_args[1]['json']['password'] == 'secret' # make sure the token from cache is used diff --git a/tests/test_lille_urban_card.py b/tests/test_lille_urban_card.py index 0a12bd0b..fa023af3 100644 --- a/tests/test_lille_urban_card.py +++ b/tests/test_lille_urban_card.py @@ -40,10 +40,10 @@ def mocked_http(url, request): return {'content': json.dumps(content), 'status_code': 200} if url.path.startswith('/clu/ws/consulterDemande/'): content = { - "n_demande_clu": '...', - "statut": "200", - "statut_desc": "CARTE_PRODUITE_EXPEDIEE", - "date": "2019-01-01 00:00:00", + 'n_demande_clu': '...', + 'statut': '200', + 'statut_desc': 'CARTE_PRODUITE_EXPEDIEE', + 'date': '2019-01-01 00:00:00', } return {'content': json.dumps(content), 'status_code': 200} if url.path == '/clu/ws/ajouterAbonnements': diff --git a/tests/test_litteralis.py b/tests/test_litteralis.py index ed707def..a4ef110e 100644 --- a/tests/test_litteralis.py +++ b/tests/test_litteralis.py @@ -43,11 +43,11 @@ def test_demandes_recues(app, connector): 'idCollectivite': '1', 'nomCollectivite': 'Malakoff', }, - 'geom': {'type': 'Point', 'coordinates': ["48.866667", "2.333333"]}, + 'geom': {'type': 'Point', 'coordinates': ['48.866667', '2.333333']}, 'additionalInformation': { 'typeDemande': 'Stationnement pour travaux', - "dateDebut": "2019-12-04T14:33:13", - "dateFin": "2019-12-09T14:33:13", + 'dateDebut': '2019-12-04T14:33:13', + 'dateFin': '2019-12-09T14:33:13', }, } diff --git a/tests/test_manager.py b/tests/test_manager.py index b5ddc118..426cb04b 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -698,7 +698,7 @@ def test_manager_import_export(app, admin_user): resp = resp.form.submit() assert ( resp.html.find('ul', {'class': 'errorlist'}).li.text - == "Unknown connectors: not_installed_app1, not_installed_app2" + == 'Unknown connectors: not_installed_app1, not_installed_app2' ) # import site @@ -813,8 +813,8 @@ class TestRequestsSubstitutionsDisplay: @pytest.fixture def setup(self, settings): settings.CONNECTORS_SETTINGS = { - "photon/t": { - "requests_substitutions": [ + 'photon/t': { + 'requests_substitutions': [ { 'search': 'abcd', 'replace': 'efgh', diff --git a/tests/test_mdel.py b/tests/test_mdel.py index c2c8b2af..f3714042 100644 --- a/tests/test_mdel.py +++ b/tests/test_mdel.py @@ -123,14 +123,14 @@ def test_invalid_demand_no_form_number(app, setup): ILE_PAYLOAD_INVALID_NO = copy.deepcopy(ILE_PAYLOAD) 
ILE_PAYLOAD_INVALID_NO.pop('display_id') resp = app.post_json('/mdel/test/create', params=ILE_PAYLOAD_INVALID_NO, status=200) - assert resp.json['err_desc'] == "display_id is required" + assert resp.json['err_desc'] == 'display_id is required' def test_create_rco_demand_type(app, setup): RCO_PAYLOAD = copy.deepcopy(ILE_PAYLOAD) RCO_PAYLOAD['extra']['demand_type'] = 'rco-la' resp = app.post_json('/mdel/test/create', params=RCO_PAYLOAD, status=200) - assert resp.json['err_desc'] == "RCO-LA processing not implemented" + assert resp.json['err_desc'] == 'RCO-LA processing not implemented' def test_create_aec_demand_type(app, setup, aec_payload): diff --git a/tests/test_mdph13.py b/tests/test_mdph13.py index c9cbe868..df919097 100644 --- a/tests/test_mdph13.py +++ b/tests/test_mdph13.py @@ -39,134 +39,134 @@ IP = '88.34.56.56' VALID_RESPONSE = json.dumps( { 'err': 0, - "data": { - "numero": FILE_NUMBER, - "beneficiaire": { - "nom": "Martini", - "prenom": "Alfonso", - "tel_mobile": "06 01 02 03 04", - "tel_fixe": "04.01.02.03.04", - "date_de_naissance": "1951-03-23", - "email": "martini.a@free.fr", - "entourage": [ + 'data': { + 'numero': FILE_NUMBER, + 'beneficiaire': { + 'nom': 'Martini', + 'prenom': 'Alfonso', + 'tel_mobile': '06 01 02 03 04', + 'tel_fixe': '04.01.02.03.04', + 'date_de_naissance': '1951-03-23', + 'email': 'martini.a@free.fr', + 'entourage': [ { - "role": "Père", - "nom": "DUPONT Henri", - "tel_mobile": "0123232323", - "tel_fixe": "0202020202", - "email": "henri.dupont@xyz.com", + 'role': 'Père', + 'nom': 'DUPONT Henri', + 'tel_mobile': '0123232323', + 'tel_fixe': '0202020202', + 'email': 'henri.dupont@xyz.com', }, { - "role": "Mère", - "nom": "DUPONT Marie", - "tel_mobile": "0123232323", - "tel_fixe": "0202020202", - "email": "marie.dupont@xyz.com", + 'role': 'Mère', + 'nom': 'DUPONT Marie', + 'tel_mobile': '0123232323', + 'tel_fixe': '0202020202', + 'email': 'marie.dupont@xyz.com', }, { - "role": "Aidant", - "nom": "ROBERT Fanny", - "tel_mobile": "0123232323", - "tel_fixe": "0202020202", - "email": "frobert@xyz.com", + 'role': 'Aidant', + 'nom': 'ROBERT Fanny', + 'tel_mobile': '0123232323', + 'tel_fixe': '0202020202', + 'email': 'frobert@xyz.com', }, ], - "adresse": { - "adresse_2": "Bliblibli", - "adresse_3": "Bliblibli", - "adresse_4": "CHEMIN DE LA CARRAIRE", - "adresse_5": "Bliblibli", - "code_postal": "13500", - "ville": "MARTIGUES", + 'adresse': { + 'adresse_2': 'Bliblibli', + 'adresse_3': 'Bliblibli', + 'adresse_4': 'CHEMIN DE LA CARRAIRE', + 'adresse_5': 'Bliblibli', + 'code_postal': '13500', + 'ville': 'MARTIGUES', }, - "incapacite": {"taux": "Taux >=80%", "date_fin_effet": "2019-06-30"}, + 'incapacite': {'taux': 'Taux >=80%', 'date_fin_effet': '2019-06-30'}, }, - "demandes": [ + 'demandes': [ { - "numero": "1544740", - "date_demande": "2015-11-26", - "type_demande": "Renouvellement", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Instruction administrative terminée en attente de passage en évaluation", - "typologie": "Demande En Cours", - "date_decision": None, + 'numero': '1544740', + 'date_demande': '2015-11-26', + 'type_demande': 'Renouvellement', + 'prestation': "Carte d'invalidité (de priorité) pour personne handicapée", + 'statut': 'Instruction administrative terminée en attente de passage en évaluation', + 'typologie': 'Demande En Cours', + 'date_decision': None, }, { - "numero": "1210524", - "date_demande": "2014-06-13", - "type_demande": "Renouvellement", - "prestation": "Carte d'invalidité (de priorité) pour 
personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée non expédiée", - "date_decision": "2014-07-10", - "date_debut_effet": "2014-08-01", - "date_fin_effet": "2016-05-01", + 'numero': '1210524', + 'date_demande': '2014-06-13', + 'type_demande': 'Renouvellement', + 'prestation': "Carte d'invalidité (de priorité) pour personne handicapée", + 'statut': 'Décision prononcée et expédition réalisée (traitement terminé)', + 'typologie': 'Traitée non expédiée', + 'date_decision': '2014-07-10', + 'date_debut_effet': '2014-08-01', + 'date_fin_effet': '2016-05-01', }, { - "numero": "1231345", - "date_demande": "2014-07-22", - "type_demande": "Recours Gracieux", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2014-09-17", - "date_debut_effet": "2014-08-01", - "date_fin_effet": "2016-05-01", + 'numero': '1231345', + 'date_demande': '2014-07-22', + 'type_demande': 'Recours Gracieux', + 'prestation': "Carte d'invalidité (de priorité) pour personne handicapée", + 'statut': 'Décision prononcée et expédition réalisée (traitement terminé)', + 'typologie': 'Traitée et expédiée', + 'date_decision': '2014-09-17', + 'date_debut_effet': '2014-08-01', + 'date_fin_effet': '2016-05-01', }, { - "numero": "666660", - "date_demande": "2012-08-13", - "type_demande": "Recours Gracieux", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2012-09-26", - "date_debut_effet": "2012-07-19", - "date_fin_effet": "2014-08-01", + 'numero': '666660', + 'date_demande': '2012-08-13', + 'type_demande': 'Recours Gracieux', + 'prestation': "Carte d'invalidité (de priorité) pour personne handicapée", + 'statut': 'Décision prononcée et expédition réalisée (traitement terminé)', + 'typologie': 'Traitée et expédiée', + 'date_decision': '2012-09-26', + 'date_debut_effet': '2012-07-19', + 'date_fin_effet': '2014-08-01', }, { - "numero": "605280", - "date_demande": "2012-04-05", - "type_demande": "1ère demande", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2012-07-19", - "date_debut_effet": "2012-07-19", - "date_fin_effet": "2014-05-01", + 'numero': '605280', + 'date_demande': '2012-04-05', + 'type_demande': '1ère demande', + 'prestation': "Carte d'invalidité (de priorité) pour personne handicapée", + 'statut': 'Décision prononcée et expédition réalisée (traitement terminé)', + 'typologie': 'Traitée et expédiée', + 'date_decision': '2012-07-19', + 'date_debut_effet': '2012-07-19', + 'date_fin_effet': '2014-05-01', }, { - "numero": "1544741", - "date_demande": "2015-11-26", - "type_demande": "Renouvellement", - "prestation": "Carte d'invalidité (de priorité) pour personne handicapée", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2015-12-22", - "date_debut_effet": "2016-05-01", - "date_fin_effet": "2026-05-01", + 'numero': '1544741', + 'date_demande': '2015-11-26', + 'type_demande': 'Renouvellement', + 'prestation': "Carte d'invalidité (de priorité) pour personne handicapée", + 'statut': 'Décision 
prononcée et expédition réalisée (traitement terminé)', + 'typologie': 'Traitée et expédiée', + 'date_decision': '2015-12-22', + 'date_debut_effet': '2016-05-01', + 'date_fin_effet': '2026-05-01', }, { - "numero": "1210526", - "date_demande": "2014-06-13", - "type_demande": "Renouvellement", - "prestation": "Carte européenne de Stationnement", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2014-07-04", - "date_debut_effet": "2014-05-01", - "date_fin_effet": "2015-05-01", + 'numero': '1210526', + 'date_demande': '2014-06-13', + 'type_demande': 'Renouvellement', + 'prestation': 'Carte européenne de Stationnement', + 'statut': 'Décision prononcée et expédition réalisée (traitement terminé)', + 'typologie': 'Traitée et expédiée', + 'date_decision': '2014-07-04', + 'date_debut_effet': '2014-05-01', + 'date_fin_effet': '2015-05-01', }, { - "numero": "605281", - "date_demande": "2012-04-05", - "type_demande": "1ère demande", - "prestation": "Carte européenne de Stationnement", - "statut": "Décision prononcée et expédition réalisée (traitement terminé)", - "typologie": "Traitée et expédiée", - "date_decision": "2012-07-04", - "date_debut_effet": "2012-05-01", - "date_fin_effet": "2014-05-01", + 'numero': '605281', + 'date_demande': '2012-04-05', + 'type_demande': '1ère demande', + 'prestation': 'Carte européenne de Stationnement', + 'statut': 'Décision prononcée et expédition réalisée (traitement terminé)', + 'typologie': 'Traitée et expédiée', + 'date_decision': '2012-07-04', + 'date_debut_effet': '2012-05-01', + 'date_fin_effet': '2014-05-01', }, ], }, diff --git a/tests/test_okina.py b/tests/test_okina.py index 6583d191..4de7b7e6 100644 --- a/tests/test_okina.py +++ b/tests/test_okina.py @@ -1742,11 +1742,11 @@ def test_okina_search(app, okina): resp = app.get(endpoint + '?lat=46.828652&lon=1.701463&institution=277', status=200) assert requests_post.call_args[0][0] == 'https://okina.example.net/b2b/wishes/search' assert json.loads(requests_post.call_args[1]['data']) == { - "type": "CLOSE_SCHOLAR", - "from-address": "", - "from-lat": "46.828652", - "from-long": "1.701463", - "institution-id": "277", + 'type': 'CLOSE_SCHOLAR', + 'from-address': '', + 'from-lat': '46.828652', + 'from-long': '1.701463', + 'institution-id': '277', } assert resp.json['err'] == 0 assert len(resp.json['data']) == 2 @@ -1760,11 +1760,11 @@ def test_okina_search(app, okina): ) assert requests_post.call_args[0][0] == 'https://okina.example.net/b2b/wishes/search' assert json.loads(requests_post.call_args[1]['data']) == { - "type": "FAR_ALL", - "from-address": "nowhere", - "from-lat": "46.8", - "from-long": "1.71", - "institution-id": "280", + 'type': 'FAR_ALL', + 'from-address': 'nowhere', + 'from-lat': '46.8', + 'from-long': '1.71', + 'institution-id': '280', } assert resp.json['err'] == 0 @@ -1922,7 +1922,7 @@ def test_okina_errors(app, okina): ) resp = app.get('/okina/test/cities', status=200) assert resp.json['err'] == 1 - assert resp.json['err_desc'].startswith("Invalid credentials") + assert resp.json['err_desc'].startswith('Invalid credentials') def test_okina_suscribe(app, okina): diff --git a/tests/test_opendatasoft.py b/tests/test_opendatasoft.py index 6d895a72..bd1bc23e 100644 --- a/tests/test_opendatasoft.py +++ b/tests/test_opendatasoft.py @@ -30,62 +30,62 @@ pytestmark = pytest.mark.django_db FAKED_CONTENT_Q_SEARCH = json.dumps( { - "nhits": 76, - "parameters": { - "dataset": "referentiel-adresse-test", - "format": 
"json", - "q": "rue de l'aubepine", - "rows": 3, - "timezone": "UTC", + 'nhits': 76, + 'parameters': { + 'dataset': 'referentiel-adresse-test', + 'format': 'json', + 'q': "rue de l'aubepine", + 'rows': 3, + 'timezone': 'UTC', }, - "records": [ + 'records': [ { - "datasetid": "referentiel-adresse-test", - "fields": { - "adresse_complete": "33 RUE DE L'AUBEPINE STRASBOURG", - "date_exprt": "2019-10-23", - "geo_point": [48.6060963542, 7.76978279836], - "nom_commun": "Strasbourg", - "nom_rue": "RUE DE L'AUBEPINE", - "num_com": 482, - "numero": "33", - "source": "Ville et Eurométropole de Strasbourg", + 'datasetid': 'referentiel-adresse-test', + 'fields': { + 'adresse_complete': "33 RUE DE L'AUBEPINE STRASBOURG", + 'date_exprt': '2019-10-23', + 'geo_point': [48.6060963542, 7.76978279836], + 'nom_commun': 'Strasbourg', + 'nom_rue': "RUE DE L'AUBEPINE", + 'num_com': 482, + 'numero': '33', + 'source': 'Ville et Eurométropole de Strasbourg', }, - "geometry": {"coordinates": [7.76978279836, 48.6060963542], "type": "Point"}, - "record_timestamp": "2019-12-02T14:15:08.376000+00:00", - "recordid": "e00cf6161e52a4c8fe510b2b74d4952036cb3473", + 'geometry': {'coordinates': [7.76978279836, 48.6060963542], 'type': 'Point'}, + 'record_timestamp': '2019-12-02T14:15:08.376000+00:00', + 'recordid': 'e00cf6161e52a4c8fe510b2b74d4952036cb3473', }, { - "datasetid": "referentiel-adresse-test", - "fields": { - "adresse_complete": "19 RUE DE L'AUBEPINE LIPSHEIM", - "date_exprt": "2019-10-23", - "geo_point": [48.4920620548, 7.66177412454], - "nom_commun": "Lipsheim", - "nom_rue": "RUE DE L'AUBEPINE", - "num_com": 268, - "numero": "19", - "source": "Ville et Eurométropole de Strasbourg", + 'datasetid': 'referentiel-adresse-test', + 'fields': { + 'adresse_complete': "19 RUE DE L'AUBEPINE LIPSHEIM", + 'date_exprt': '2019-10-23', + 'geo_point': [48.4920620548, 7.66177412454], + 'nom_commun': 'Lipsheim', + 'nom_rue': "RUE DE L'AUBEPINE", + 'num_com': 268, + 'numero': '19', + 'source': 'Ville et Eurométropole de Strasbourg', }, - "geometry": {"coordinates": [7.66177412454, 48.4920620548], "type": "Point"}, - "record_timestamp": "2019-12-02T14:15:08.376000+00:00", - "recordid": "7cafcd5c692773e8b863587b2d38d6be82e023d8", + 'geometry': {'coordinates': [7.66177412454, 48.4920620548], 'type': 'Point'}, + 'record_timestamp': '2019-12-02T14:15:08.376000+00:00', + 'recordid': '7cafcd5c692773e8b863587b2d38d6be82e023d8', }, { - "datasetid": "referentiel-adresse-test", - "fields": { - "adresse_complete": "29 RUE DE L'AUBEPINE STRASBOURG", - "date_exprt": "2019-10-23", - "geo_point": [48.6056497224, 7.76988497729], - "nom_commun": "Strasbourg", - "nom_rue": "RUE DE L'AUBEPINE", - "num_com": 482, - "numero": "29", - "source": "Ville et Eurométropole de Strasbourg", + 'datasetid': 'referentiel-adresse-test', + 'fields': { + 'adresse_complete': "29 RUE DE L'AUBEPINE STRASBOURG", + 'date_exprt': '2019-10-23', + 'geo_point': [48.6056497224, 7.76988497729], + 'nom_commun': 'Strasbourg', + 'nom_rue': "RUE DE L'AUBEPINE", + 'num_com': 482, + 'numero': '29', + 'source': 'Ville et Eurométropole de Strasbourg', }, - "geometry": {"coordinates": [7.76988497729, 48.6056497224], "type": "Point"}, - "record_timestamp": "2019-12-02T14:15:08.376000+00:00", - "recordid": "0984a5e1745701f71c91af73ce764e1f7132e0ff", + 'geometry': {'coordinates': [7.76988497729, 48.6056497224], 'type': 'Point'}, + 'record_timestamp': '2019-12-02T14:15:08.376000+00:00', + 'recordid': '0984a5e1745701f71c91af73ce764e1f7132e0ff', }, ], } @@ -93,30 +93,30 @@ 
FAKED_CONTENT_Q_SEARCH = json.dumps( FAKED_CONTENT_ID_SEARCH = json.dumps( { - "nhits": 1, - "parameters": { - "dataset": "referentiel-adresse-test", - "format": "json", - "q": "recordid:7cafcd5c692773e8b863587b2d38d6be82e023d8", - "rows": 1, - "timezone": "UTC", + 'nhits': 1, + 'parameters': { + 'dataset': 'referentiel-adresse-test', + 'format': 'json', + 'q': 'recordid:7cafcd5c692773e8b863587b2d38d6be82e023d8', + 'rows': 1, + 'timezone': 'UTC', }, - "records": [ + 'records': [ { - "datasetid": "referentiel-adresse-test", - "fields": { - "adresse_complete": "19 RUE DE L'AUBEPINE LIPSHEIM", - "date_exprt": "2019-10-23", - "geo_point": [48.4920620548, 7.66177412454], - "nom_commun": "Lipsheim", - "nom_rue": "RUE DE L'AUBEPINE", - "num_com": 268, - "numero": "19", - "source": "Ville et Eurométropole de Strasbourg", + 'datasetid': 'referentiel-adresse-test', + 'fields': { + 'adresse_complete': "19 RUE DE L'AUBEPINE LIPSHEIM", + 'date_exprt': '2019-10-23', + 'geo_point': [48.4920620548, 7.66177412454], + 'nom_commun': 'Lipsheim', + 'nom_rue': "RUE DE L'AUBEPINE", + 'num_com': 268, + 'numero': '19', + 'source': 'Ville et Eurométropole de Strasbourg', }, - "geometry": {"coordinates": [7.66177412454, 48.4920620548], "type": "Point"}, - "record_timestamp": "2019-12-02T14:15:08.376000+00:00", - "recordid": "7cafcd5c692773e8b863587b2d38d6be82e023d8", + 'geometry': {'coordinates': [7.66177412454, 48.4920620548], 'type': 'Point'}, + 'record_timestamp': '2019-12-02T14:15:08.376000+00:00', + 'recordid': '7cafcd5c692773e8b863587b2d38d6be82e023d8', } ], } @@ -243,7 +243,7 @@ def test_search_using_q(mocked_get, app, connector): 'apikey': 'my_secret', 'dataset': 'referentiel-adresse-test', 'rows': '3', - 'q': "rue de aubepine", + 'q': 'rue de aubepine', } assert not resp.json['err'] assert len(resp.json['data']) == 3 @@ -343,7 +343,7 @@ def test_query_q_using_q(mocked_get, app, query): 'refine.source': ['Ville et Eurométropole de Strasbourg'], 'exclude.numero': ['42', '43'], 'rows': 3, - 'q': "rue de aubepine", + 'q': 'rue de aubepine', } assert not resp.json['err'] assert len(resp.json['data']) == 3 @@ -453,7 +453,7 @@ def test_query_q_having_original_fields(mocked_get, app, query): assert resp.json['data'][0]['original_text'] == 'original text' assert ( resp.json['data'][0]['text'] - == "7cafcd5c692773e8b863587b2d38d6be82e023d8 - original id - original text" + == '7cafcd5c692773e8b863587b2d38d6be82e023d8 - original id - original text' ) diff --git a/tests/test_opengis.py b/tests/test_opengis.py index 8cacdc1f..14b56e4b 100644 --- a/tests/test_opengis.py +++ b/tests/test_opengis.py @@ -614,7 +614,7 @@ def test_get_feature_bad_result(mocked_get, app, connector): @pytest.mark.parametrize( - "server_responses, version, typename_label", + 'server_responses, version, typename_label', [(geoserver_responses_v1_0_0, '1.0.0', 'typename'), (geoserver_responses, '2.0.0', 'typenames')], ) @mock.patch('passerelle.utils.Request.get') diff --git a/tests/test_photon.py b/tests/test_photon.py index 98ad73a3..8af59906 100644 --- a/tests/test_photon.py +++ b/tests/test_photon.py @@ -26,41 +26,41 @@ import tests.utils from passerelle.apps.photon.models import AddressCacheModel, Photon CONTENT = { - "features": [ + 'features': [ { - "geometry": {"coordinates": [4.8522272, 45.7587414], "type": "Point"}, - "properties": { - "city": "Lyon 3ème Arrondissement", - "country": "France", - "housenumber": "208", - "osm_id": 154419, - "osm_key": "place", - "osm_type": "N", - "osm_value": "house", - "postcode": "69003", - "street": "Rue 
Garibaldi", - "type": "house", + 'geometry': {'coordinates': [4.8522272, 45.7587414], 'type': 'Point'}, + 'properties': { + 'city': 'Lyon 3ème Arrondissement', + 'country': 'France', + 'housenumber': '208', + 'osm_id': 154419, + 'osm_key': 'place', + 'osm_type': 'N', + 'osm_value': 'house', + 'postcode': '69003', + 'street': 'Rue Garibaldi', + 'type': 'house', }, - "type": "Feature", + 'type': 'Feature', }, { - "geometry": {"coordinates": [4.8522681, 45.7585214], "type": "Point"}, - "properties": { - "city": "Lyon 3ème Arrondissement", - "country": "France", - "housenumber": "208bis", - "osm_id": 153400, - "osm_key": "place", - "osm_type": "N", - "osm_value": "house", - "postcode": "69003", - "street": "Rue Garibaldi", - "type": "house", + 'geometry': {'coordinates': [4.8522681, 45.7585214], 'type': 'Point'}, + 'properties': { + 'city': 'Lyon 3ème Arrondissement', + 'country': 'France', + 'housenumber': '208bis', + 'osm_id': 153400, + 'osm_key': 'place', + 'osm_type': 'N', + 'osm_value': 'house', + 'postcode': '69003', + 'street': 'Rue Garibaldi', + 'type': 'house', }, - "type": "Feature", + 'type': 'Feature', }, ], - "type": "FeatureCollection", + 'type': 'FeatureCollection', } FAKED_CONTENT = json.dumps(CONTENT) @@ -166,7 +166,7 @@ def test_photon_api_timeout(mocked_get, app, photon): @mock.patch('passerelle.utils.Request.get') def test_photon_api_error(mocked_get, app, photon): def raise_for_status(): - raise HTTPError("400 Client Error: Bad Request for url: xxx") + raise HTTPError('400 Client Error: Bad Request for url: xxx') response = tests.utils.FakedResponse(content=json.dumps({'title': 'error'}), status_code=400) response.raise_for_status = raise_for_status diff --git a/tests/test_planitech.py b/tests/test_planitech.py index c3ed4f84..c0842daf 100644 --- a/tests/test_planitech.py +++ b/tests/test_planitech.py @@ -23,41 +23,41 @@ def assert_mste(data, ref_data): @pytest.mark.parametrize( - "data,mste_data", + 'data,mste_data', [ - (None, ["MSTE0102", 6, "CRC82413E70", 0, 0, 0]), - ("toto", ["MSTE0102", 7, "CRCD45ACB10", 0, 0, 21, "toto"]), # string - (mste.Couple(("toto", "tata")), ["MSTE0102", 10, "CRCD45ACB10", 0, 0, 32, 21, "toto", 21, "tata"]), + (None, ['MSTE0102', 6, 'CRC82413E70', 0, 0, 0]), + ('toto', ['MSTE0102', 7, 'CRCD45ACB10', 0, 0, 21, 'toto']), # string + (mste.Couple(('toto', 'tata')), ['MSTE0102', 10, 'CRCD45ACB10', 0, 0, 32, 21, 'toto', 21, 'tata']), # couple ( - [mste.Couple(("toto", "tata")), mste.Couple(("toto", "tata"))], - ["MSTE0102", 14, "CRCD45ACB10", 0, 0, 31, 2, 32, 21, "toto", 21, "tata", 9, 1], + [mste.Couple(('toto', 'tata')), mste.Couple(('toto', 'tata'))], + ['MSTE0102', 14, 'CRCD45ACB10', 0, 0, 31, 2, 32, 21, 'toto', 21, 'tata', 9, 1], ), # couple are stored in refs - (["toto"], ["MSTE0102", 9, "CRCD4E14B75", 0, 0, 31, 1, 21, "toto"]), # array + (['toto'], ['MSTE0102', 9, 'CRCD4E14B75', 0, 0, 31, 1, 21, 'toto']), # array ( - ["toto", "tata", "toto"], - ["MSTE0102", 13, "CRC7311752F", 0, 0, 31, 3, 21, "toto", 21, "tata", 9, 1], + ['toto', 'tata', 'toto'], + ['MSTE0102', 13, 'CRC7311752F', 0, 0, 31, 3, 21, 'toto', 21, 'tata', 9, 1], ), # array with reference - ({"mykey": "toto"}, ["MSTE0102", 11, "CRC1C9E9FE1", 0, 1, "mykey", 30, 1, 0, 21, "toto"]), + ({'mykey': 'toto'}, ['MSTE0102', 11, 'CRC1C9E9FE1', 0, 1, 'mykey', 30, 1, 0, 21, 'toto']), # dictionnary ( - [{"mykey": "toto"}, {"mykey": "toto"}], - ["MSTE0102", 15, "CRC1C9E9FE1", 0, 1, "mykey", 31, 2, 30, 1, 0, 21, "toto", 9, 1], + [{'mykey': 'toto'}, {'mykey': 'toto'}], + ['MSTE0102', 15, 
'CRC1C9E9FE1', 0, 1, 'mykey', 31, 2, 30, 1, 0, 21, 'toto', 9, 1], ), # dictionnary are stored in refs - (float(2), ["MSTE0102", 7, "CRC1C9E9FE1", 0, 0, 20, 2]), # decimal - ([float(2), float(2)], ["MSTE0102", 11, "CRC1C9E9FE1", 0, 0, 31, 2, 20, 2, 9, 1]), + (float(2), ['MSTE0102', 7, 'CRC1C9E9FE1', 0, 0, 20, 2]), # decimal + ([float(2), float(2)], ['MSTE0102', 11, 'CRC1C9E9FE1', 0, 0, 31, 2, 20, 2, 9, 1]), # decimal are stored in refs - (mste.Uint32(1), ["MSTE0102", 7, "CRC1C9E9FE1", 0, 0, 15, 1]), # uint32 - (True, ["MSTE0102", 6, "CRC1C9E9FE1", 0, 0, 1]), # True - (False, ["MSTE0102", 6, "CRC1C9E9FE1", 0, 0, 2]), # False - ('', ["MSTE0102", 6, "CRC1C9E9FE1", 0, 0, 3]), # empty string - (datetime.fromtimestamp(1537364340), ["MSTE0102", 7, "CRC1C9E9FE1", 0, 0, 22, 1537364340]), + (mste.Uint32(1), ['MSTE0102', 7, 'CRC1C9E9FE1', 0, 0, 15, 1]), # uint32 + (True, ['MSTE0102', 6, 'CRC1C9E9FE1', 0, 0, 1]), # True + (False, ['MSTE0102', 6, 'CRC1C9E9FE1', 0, 0, 2]), # False + ('', ['MSTE0102', 6, 'CRC1C9E9FE1', 0, 0, 3]), # empty string + (datetime.fromtimestamp(1537364340), ['MSTE0102', 7, 'CRC1C9E9FE1', 0, 0, 22, 1537364340]), # local date ( [datetime.fromtimestamp(1537364340), datetime.fromtimestamp(1537364340)], - ["MSTE0102", 11, "CRC1C9E9FE1", 0, 0, 31, 2, 22, 1537364340, 9, 1], + ['MSTE0102', 11, 'CRC1C9E9FE1', 0, 0, 31, 2, 22, 1537364340, 9, 1], ), # local date in refs ], @@ -74,22 +74,22 @@ def test_encode_unsupported_type(): def test_real(): mste_data = [ - "MSTE0102", + 'MSTE0102', 128, - "CRC99D9BCEB", + 'CRC99D9BCEB', 0, 11, - "requestDate", - "responseDate", - "requestName", - "requestedEndingTime", - "availablePlaces", - "label", - "freeGaps", - "placeIdentifier", - "resourceIdentifier", - "daysMask", - "requestedStartingTime", + 'requestDate', + 'responseDate', + 'requestName', + 'requestedEndingTime', + 'availablePlaces', + 'label', + 'freeGaps', + 'placeIdentifier', + 'resourceIdentifier', + 'daysMask', + 'requestedStartingTime', 30, 7, 0, @@ -100,7 +100,7 @@ def test_real(): 1538404500, 2, 21, - "getFreeGaps", + 'getFreeGaps', 3, 20, 600, @@ -111,7 +111,7 @@ def test_real(): 4, 5, 21, - "M.F.F. 2", + 'M.F.F. 
2', 6, 31, 15, @@ -264,7 +264,7 @@ def test_call_planitech(connector, monkeypatch): connector._planitech_session = True response = MockResponse(content='somestring') - assert connector._call_planitech(response.session_meth, 'endpoint') == "somestring" + assert connector._call_planitech(response.session_meth, 'endpoint') == 'somestring' response = MockResponse(content=set(), status_code=400) with pytest.raises(APIError) as excinfo: @@ -744,7 +744,7 @@ def test_getplaces_referential_use_cache(app, connector): def test_login(connector): @urlmatch(netloc=r'(.*\.)?planitech\.com$') def planitech_mock(url, request): - raise requests.exceptions.RequestException("Bad news") + raise requests.exceptions.RequestException('Bad news') with HTTMock(planitech_mock): with pytest.raises(APIError) as excinfo: @@ -1005,7 +1005,7 @@ def test_get_freegaps(app, connector, monkeypatch, settings): ) json_resp = response.json assert json_resp['err'] == 1 - assert json_resp['err_desc'] == "Invalid time format: notatime" + assert json_resp['err_desc'] == 'Invalid time format: notatime' # start_date or start_days required response = app.get( @@ -1013,7 +1013,7 @@ def test_get_freegaps(app, connector, monkeypatch, settings): ) json_resp = response.json assert json_resp['err'] == 1 - assert json_resp['err_desc'] == "start_date or start_days is required" + assert json_resp['err_desc'] == 'start_date or start_days is required' # invalid display param response = app.get( @@ -1022,7 +1022,7 @@ def test_get_freegaps(app, connector, monkeypatch, settings): ) json_resp = response.json assert json_resp['err'] == 1 - assert json_resp['err_desc'] == "Valid display are: date, place, full" + assert json_resp['err_desc'] == 'Valid display are: date, place, full' def test_get_freegaps_start_days(app, connector, monkeypatch, settings, freezer): diff --git a/tests/test_plone_restapi.py b/tests/test_plone_restapi.py index 63955a1f..520dfc89 100644 --- a/tests/test_plone_restapi.py +++ b/tests/test_plone_restapi.py @@ -38,13 +38,13 @@ TOKEN_RESPONSE = { } TOKEN_ERROR_RESPONSE = { - "error": "access_denied", - "error_description": "Mauvaises informations de connexion de l'utilisateur", + 'error': 'access_denied', + 'error_description': "Mauvaises informations de connexion de l'utilisateur", } def json_get_data(filename): - with open(os.path.join(TEST_BASE_DIR, "%s.json" % filename)) as fd: + with open(os.path.join(TEST_BASE_DIR, '%s.json' % filename)) as fd: return json.load(fd) diff --git a/tests/test_proxylogger.py b/tests/test_proxylogger.py index 88fde2ea..45ddbbfd 100644 --- a/tests/test_proxylogger.py +++ b/tests/test_proxylogger.py @@ -326,7 +326,7 @@ def test_logged_requests_and_responses_max_size(app, db, monkeypatch, settings): assert len(ResourceLog.objects.all()) == 3 # - connector POST queries - assert ResourceLog.objects.all()[1].extra['request_payload'] == "connector_query_var=22222222222222222222" + assert ResourceLog.objects.all()[1].extra['request_payload'] == 'connector_query_var=22222222222222222222' assert ResourceLog.objects.all()[1].extra.get('response_headers') == {'Content-Type': 'foo/bar'} assert ( ResourceLog.objects.all()[1].extra.get('response_content') diff --git a/tests/test_requests.py b/tests/test_requests.py index 70c5c22f..b3e65e62 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -48,15 +48,15 @@ def log_level(request): def httpbin_mock(url, request): return response( 200, - {"message": "Are you really josh ?"}, - headers={"Content-Type": "application/json"}, + {'message': 'Are you 
really josh ?'}, + headers={'Content-Type': 'application/json'}, request=request, ) @urlmatch(netloc=r'(.*\.)?httperror\.org$') def http400_mock(url, request): - return response(400, {"foo": "bar"}, headers={"Content-Type": "application/json"}, request=request) + return response(400, {'foo': 'bar'}, headers={'Content-Type': 'application/json'}, request=request) def test_log_level(caplog, log_level): @@ -305,7 +305,7 @@ def test_resource_hawk_auth(mocked_send, caplog, endpoint_response): method='POST', content_type='application/json', content='{"key": "value"}', - ext="extra attribute", + ext='extra attribute', ) expected_header = sender.request_header @@ -526,15 +526,15 @@ def test_http_max_retries_global(mocked_make_request, settings): def test_requests_to_legacy_urls(log_level): responses.add( responses.GET, - "https://new.org/foobar", - json={"foo": "bar"}, + 'https://new.org/foobar', + json={'foo': 'bar'}, status=200, ) logger = logging.getLogger('requests') logger.setLevel(log_level) requests = Request(logger=logger) resp = requests.get('https://old.org/foobar') - assert resp.json() == {"foo": "bar"} + assert resp.json() == {'foo': 'bar'} assert resp.request.url == 'https://new.org/foobar' @@ -551,7 +551,7 @@ def test_requests_substitution(settings): requests = Request(logger=logging.getLogger(), resource=resource) settings.CONNECTORS_SETTINGS = { - "cmis/test": { + 'cmis/test': { 'requests_substitutions': [ { 'url': 'https://example.com/', @@ -563,7 +563,7 @@ def test_requests_substitution(settings): } responses.add( responses.GET, - "https://example.com/html", + 'https://example.com/html', content_type='text/html', body=b'\n\n', status=200, @@ -575,7 +575,7 @@ def test_requests_substitution(settings): responses.add( responses.GET, - "https://example.com/xml", + 'https://example.com/xml', content_type='application/xml', body=b'', status=200, @@ -585,7 +585,7 @@ def test_requests_substitution(settings): # check substitution is applied inside JSON, even if some characters are escaped responses.add( responses.GET, - "https://example.com/json", + 'https://example.com/json', content_type='application/json', body=b'{"url": "http:\\/\\/example.internal/path/"}', status=200, @@ -594,7 +594,7 @@ def test_requests_substitution(settings): responses.add( responses.GET, - "https://example.com/binary", + 'https://example.com/binary', content_type='application/octet-stream', body=b'\00', status=200, @@ -606,7 +606,7 @@ def test_requests_substitution(settings): responses.add( responses.GET, - "https://example.com/binary2", + 'https://example.com/binary2', content_type='', body=b'\00', status=200, @@ -618,7 +618,7 @@ def test_requests_substitution(settings): responses.add( responses.GET, - "https://example2.com/html", + 'https://example2.com/html', content_type='text/html', body=b'\n\n', status=200, @@ -631,7 +631,7 @@ def test_requests_substitution(settings): # check that url field is optional settings.CONNECTORS_SETTINGS = { - "cmis/test": { + 'cmis/test': { 'requests_substitutions': [ { 'search': 'http://example.internal', @@ -642,7 +642,7 @@ def test_requests_substitution(settings): } responses.add( responses.GET, - "https://whatever.com/html", + 'https://whatever.com/html', content_type='text/html', body=b'\n\n', status=200, @@ -657,7 +657,7 @@ def test_requests_substitution(settings): requests = Request(logger=logging.getLogger(), resource=resource) responses.add( responses.GET, - "https://example.com/html", + 'https://example.com/html', content_type='text/html', body=b'\n\n', status=200, @@ 
-685,7 +685,7 @@ def test_requests_resource_down(): responses.add( responses.GET, - "https://example.com/exception", + 'https://example.com/exception', body=ConnectionError('down'), ) with pytest.raises(ConnectionError): @@ -695,7 +695,7 @@ def test_requests_resource_down(): responses.add( responses.GET, - "https://example.com/exception", + 'https://example.com/exception', body=ConnectionError('down'), ) logger = mock.Mock() @@ -708,13 +708,13 @@ def test_requests_resource_down(): responses.add( responses.GET, - "https://example.com/exception", + 'https://example.com/exception', body='Error', status=500, ) responses.add( responses.GET, - "https://example.com/exception", + 'https://example.com/exception', body='ok', ) resource.down.return_value = False @@ -730,7 +730,7 @@ def test_requests_resource_down(): responses.add( responses.GET, - "https://example.com/exception", + 'https://example.com/exception', body='Error', status=500, ) diff --git a/tests/test_rsa13.py b/tests/test_rsa13.py index e42cee37..49ba96fb 100644 --- a/tests/test_rsa13.py +++ b/tests/test_rsa13.py @@ -137,23 +137,23 @@ def test_platform(app, rsa13, url): PLATFORM_DETAIL = { - "id": 11, - "name": "ADPEI", - "dsp": "LA", - "adr1": "ADPEI", - "adr2": None, - "adr3": None, - "adr4": "18 BOULEVARD CAMILLE FLAMMARION", - "adr5": None, - "adr6": "13001 MARSEILLE", - "tel": "0491110140", - "queries": [ - {"id": 1, "name": "NON CONSULTÉ", "count": 727}, - {"id": 2, "name": "SANS RU", "count": 727}, - {"id": 3, "name": "SANS CONTRAT SUR LA PLATEFORME", "count": 231}, - {"id": 4, "name": "CONTRAT BIENTOT TERMINE", "count": 0}, - {"id": 5, "name": "SANS AUCUNE ACTION", "count": 44}, - {"id": 6, "name": "ACTION VALIDEE NON DEBUTEE NON CLOSE", "count": 111}, + 'id': 11, + 'name': 'ADPEI', + 'dsp': 'LA', + 'adr1': 'ADPEI', + 'adr2': None, + 'adr3': None, + 'adr4': '18 BOULEVARD CAMILLE FLAMMARION', + 'adr5': None, + 'adr6': '13001 MARSEILLE', + 'tel': '0491110140', + 'queries': [ + {'id': 1, 'name': 'NON CONSULTÉ', 'count': 727}, + {'id': 2, 'name': 'SANS RU', 'count': 727}, + {'id': 3, 'name': 'SANS CONTRAT SUR LA PLATEFORME', 'count': 231}, + {'id': 4, 'name': 'CONTRAT BIENTOT TERMINE', 'count': 0}, + {'id': 5, 'name': 'SANS AUCUNE ACTION', 'count': 44}, + {'id': 6, 'name': 'ACTION VALIDEE NON DEBUTEE NON CLOSE', 'count': 111}, ], } @@ -169,22 +169,22 @@ def test_platform_details(app, rsa13, url): PLATFORM_REFERENT = [ { - "id": 324, - "nom": "EHRMANN ", - "prenom": "Jean Paul", - "tel": None, - "email": "john.doe@example.com", - "role": "Coordonnateur", - "statut": "Actif", + 'id': 324, + 'nom': 'EHRMANN ', + 'prenom': 'Jean Paul', + 'tel': None, + 'email': 'john.doe@example.com', + 'role': 'Coordonnateur', + 'statut': 'Actif', }, { - "id": 239, - "nom": "CHAUMONT ", - "prenom": "Nadine", - "tel": '090909090909', - "email": "jane.doe@example.com", - "role": "Accompagnateur", - "statut": "Clos", + 'id': 239, + 'nom': 'CHAUMONT ', + 'prenom': 'Nadine', + 'tel': '090909090909', + 'email': 'jane.doe@example.com', + 'role': 'Accompagnateur', + 'statut': 'Clos', }, ] @@ -251,20 +251,20 @@ def test_platform_referent_update(app, rsa13, url): BENEFICIAIRE = { - "id": 386981, - "civilite": "MR", - "nom": "AAABEFBAADF", - "prenom": "ACCDCBE", - "date_naissance": "1958-01-01", - "actif": "Oui", - "matricule": "193740", - "code_postal": "13001", - "commune": "MARSEILLE", - "code_pi": "51", - "referent": " ", - "date_deb_affectation": "2019-03-11", - "consulte": "Oui", - "toppersdrodevorsa": "Oui", + 'id': 386981, + 'civilite': 'MR', + 'nom': 
'AAABEFBAADF', + 'prenom': 'ACCDCBE', + 'date_naissance': '1958-01-01', + 'actif': 'Oui', + 'matricule': '193740', + 'code_postal': '13001', + 'commune': 'MARSEILLE', + 'code_pi': '51', + 'referent': ' ', + 'date_deb_affectation': '2019-03-11', + 'consulte': 'Oui', + 'toppersdrodevorsa': 'Oui', } @@ -272,8 +272,8 @@ BENEFICIAIRE = { def test_platform_beneficiaire_nom(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/', params={'nom': 'AAABEFBAADF'}) assert response.json == { - "err": 0, - "data": [BENEFICIAIRE], + 'err': 0, + 'data': [BENEFICIAIRE], } @@ -281,8 +281,8 @@ def test_platform_beneficiaire_nom(app, rsa13, url): def test_platform_beneficiaire_matricule(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/', params={'matricule': '193740'}) assert response.json == { - "err": 0, - "data": [BENEFICIAIRE], + 'err': 0, + 'data': [BENEFICIAIRE], } @@ -290,8 +290,8 @@ def test_platform_beneficiaire_matricule(app, rsa13, url): def test_platform_beneficiaire_page(app, rsa13, url): response = app.get(url + 'platform/11/beneficiaire/', params={'page': '0'}) assert response.json == { - "err": 0, - "data": [BENEFICIAIRE], + 'err': 0, + 'data': [BENEFICIAIRE], } @@ -314,19 +314,19 @@ def test_platform_beneficiaire_full(app, rsa13, url): }, ) assert response.json == { - "err": 0, - "data": [BENEFICIAIRE], + 'err': 0, + 'data': [BENEFICIAIRE], } BENEFICIAIRE_CSV = { - "NUM_CAF": '1234', - "CODE_PER": '1234', - "PRENOM_PER": 'prenom', - "DTNAI_PER": '1234', - "ACTIF_PER": '1234', - "NOM_PER": 'nom', - "INSEE_ADR": '99999', + 'NUM_CAF': '1234', + 'CODE_PER': '1234', + 'PRENOM_PER': 'prenom', + 'DTNAI_PER': '1234', + 'ACTIF_PER': '1234', + 'NOM_PER': 'nom', + 'INSEE_ADR': '99999', } @@ -963,20 +963,20 @@ def test_platform_facturation_periodes(app, rsa13, url): 'err': 0, 'data': [ { - "PLATEFORME": "APDL MARTIGUES", - "MATRICULE": "1000017", - "NOM": "VKEOIFKQS", - "PRENOM": "SABINE", - "DTNAI": "1950-01-01", - "GENRE": "Femme", - "ROLE": "Demandeur", - "CODE_POSTAL": "13320", - "COMMUNE": "BOUC BEL AIR", - "DATE_SIGN": "2021-01-07", - "DATE_DEB": "2021-01-26", - "DUREE": 6, - "DATE_FIN": None, - "COEFFICIENT": 1.5, + 'PLATEFORME': 'APDL MARTIGUES', + 'MATRICULE': '1000017', + 'NOM': 'VKEOIFKQS', + 'PRENOM': 'SABINE', + 'DTNAI': '1950-01-01', + 'GENRE': 'Femme', + 'ROLE': 'Demandeur', + 'CODE_POSTAL': '13320', + 'COMMUNE': 'BOUC BEL AIR', + 'DATE_SIGN': '2021-01-07', + 'DATE_DEB': '2021-01-26', + 'DUREE': 6, + 'DATE_FIN': None, + 'COEFFICIENT': 1.5, }, ], }, @@ -988,20 +988,20 @@ def test_platform_facturation_csv(app, rsa13, url): stream = io.StringIO(response.content.decode('utf-8-sig')) assert list(csv.reader(stream, delimiter=';')) == [ [ - "PLATEFORME", - "MATRICULE", - "NOM", - "PRENOM", - "DTNAI", - "GENRE", - "ROLE", - "CODE_POSTAL", - "COMMUNE", - "DATE_SIGN", - "DATE_DEB", - "DUREE", - "DATE_FIN", - "COEFFICIENT", + 'PLATEFORME', + 'MATRICULE', + 'NOM', + 'PRENOM', + 'DTNAI', + 'GENRE', + 'ROLE', + 'CODE_POSTAL', + 'COMMUNE', + 'DATE_SIGN', + 'DATE_DEB', + 'DUREE', + 'DATE_FIN', + 'COEFFICIENT', ], [ 'APDL MARTIGUES', @@ -1045,86 +1045,86 @@ DUREE''' [ '/api/platform/11/beneficiaire/sorti/csv', { - "err": 0, - "data": [ + 'err': 0, + 'data': [ { - "NUM_CAF": "372927", - "CODE_PER": 415443, - "NOM_PER": "DCFFEBABBDDCDEC", - "PRENOM_PER": "CBDACDCFEBBAA", - "DTNAI_PER": "1972-01-01", - "CP_PER": "13004", - "COMMUNE_PER": "MARSEILLE", - "ACTIF_PER": "Oui", - "CODE_PI": 53, - "LIB_CODE_PI": "Pôle d'insertion Marseille III", - "TOPPERSDRODEVORSA": "N", - 
"LIB_ETATDOSRSA": "Droit clos", - "LIB_MOTIF_ETATDOSRSA": "Clôture suite à échéance (4 mois sans droits)", - "PLT_DT_DEB_AFF": "2021-10-05", - "PLT_DT_FIN_AFF": "2022-06-13", - "PLT_MOTIF_FIN_ACC": None, - "PLT_COMMENTAIRE_REF": ( + 'NUM_CAF': '372927', + 'CODE_PER': 415443, + 'NOM_PER': 'DCFFEBABBDDCDEC', + 'PRENOM_PER': 'CBDACDCFEBBAA', + 'DTNAI_PER': '1972-01-01', + 'CP_PER': '13004', + 'COMMUNE_PER': 'MARSEILLE', + 'ACTIF_PER': 'Oui', + 'CODE_PI': 53, + 'LIB_CODE_PI': "Pôle d'insertion Marseille III", + 'TOPPERSDRODEVORSA': 'N', + 'LIB_ETATDOSRSA': 'Droit clos', + 'LIB_MOTIF_ETATDOSRSA': 'Clôture suite à échéance (4 mois sans droits)', + 'PLT_DT_DEB_AFF': '2021-10-05', + 'PLT_DT_FIN_AFF': '2022-06-13', + 'PLT_MOTIF_FIN_ACC': None, + 'PLT_COMMENTAIRE_REF': ( '29/11/2021 Mme présente au RDV, mais pas de CER car plus ' 'de RSA. Titulaire d\'une pension d\'invalidité elle a un complément d\'ASI.\nE.CASTORI' ), - "PLT_NUM_CI": None, - "PLT_PLATEFORME_CI": None, - "PLT_OPERATEUR_CI": None, - "PLT_REFERENT_CI": " ", - "PLT_DECISION_CI": None, - "PLT_DUREE_CI": None, - "PLT_DATE_DEB_CI": None, - "PLT_DATE_FIN_CI": None, - "NOUVEAU_DT_DEB_AFF": "2022-06-13", - "NOUVEAU_AFF": "SORTIE", - "NOUVEAU_COMMENTAIRE_PI": None, - "NOUVEAU_NUM_CI": None, - "NOUVEAU_PLATEFORME_CI": None, - "NOUVEAU_OPERATEUR_CI": None, - "NOUVEAU_REFERENT_CI": None, - "NOUVEAU_DECISION_CI": None, - "NOUVEAU_DUREE_CI": None, - "NOUVEAU_DATE_DEB_CI": None, - "NOUVEAU_DATE_FIN_CI": None, + 'PLT_NUM_CI': None, + 'PLT_PLATEFORME_CI': None, + 'PLT_OPERATEUR_CI': None, + 'PLT_REFERENT_CI': ' ', + 'PLT_DECISION_CI': None, + 'PLT_DUREE_CI': None, + 'PLT_DATE_DEB_CI': None, + 'PLT_DATE_FIN_CI': None, + 'NOUVEAU_DT_DEB_AFF': '2022-06-13', + 'NOUVEAU_AFF': 'SORTIE', + 'NOUVEAU_COMMENTAIRE_PI': None, + 'NOUVEAU_NUM_CI': None, + 'NOUVEAU_PLATEFORME_CI': None, + 'NOUVEAU_OPERATEUR_CI': None, + 'NOUVEAU_REFERENT_CI': None, + 'NOUVEAU_DECISION_CI': None, + 'NOUVEAU_DUREE_CI': None, + 'NOUVEAU_DATE_DEB_CI': None, + 'NOUVEAU_DATE_FIN_CI': None, }, { - "NUM_CAF": "1677380", - "CODE_PER": 816754, - "NOM_PER": "EBBCAAFBDCCF", - "PRENOM_PER": "CCCADFBCBCEBCDCEBC", - "DTNAI_PER": "1956-01-01", - "CP_PER": "13012", - "COMMUNE_PER": "MARSEILLE 12", - "ACTIF_PER": "Oui", - "CODE_PI": 53, - "LIB_CODE_PI": "Pôle d'insertion Marseille III", - "TOPPERSDRODEVORSA": "N", - "LIB_ETATDOSRSA": "Droit clos", - "LIB_MOTIF_ETATDOSRSA": "Clôture suite à échéance (4 mois sans droits)", - "PLT_DT_DEB_AFF": "2021-10-15", - "PLT_DT_FIN_AFF": "2022-06-13", - "PLT_MOTIF_FIN_ACC": None, - "PLT_COMMENTAIRE_REF": None, - "PLT_NUM_CI": None, - "PLT_PLATEFORME_CI": None, - "PLT_OPERATEUR_CI": None, - "PLT_REFERENT_CI": " ", - "PLT_DECISION_CI": None, - "PLT_DUREE_CI": None, - "PLT_DATE_DEB_CI": None, - "PLT_DATE_FIN_CI": None, - "NOUVEAU_DT_DEB_AFF": "2022-06-13", - "NOUVEAU_AFF": "SORTIE", - "NOUVEAU_COMMENTAIRE_PI": None, - "NOUVEAU_NUM_CI": "16", - "NOUVEAU_PLATEFORME_CI": "CCO CANTINI", - "NOUVEAU_OPERATEUR_CI": "CCO", - "NOUVEAU_REFERENT_CI": "O BOBEUF", - "NOUVEAU_DECISION_CI": "Validé", - "NOUVEAU_DUREE_CI": "4", - "NOUVEAU_DATE_DEB_CI": "2021-10-18", - "NOUVEAU_DATE_FIN_CI": "2022-02-18", + 'NUM_CAF': '1677380', + 'CODE_PER': 816754, + 'NOM_PER': 'EBBCAAFBDCCF', + 'PRENOM_PER': 'CCCADFBCBCEBCDCEBC', + 'DTNAI_PER': '1956-01-01', + 'CP_PER': '13012', + 'COMMUNE_PER': 'MARSEILLE 12', + 'ACTIF_PER': 'Oui', + 'CODE_PI': 53, + 'LIB_CODE_PI': "Pôle d'insertion Marseille III", + 'TOPPERSDRODEVORSA': 'N', + 'LIB_ETATDOSRSA': 'Droit clos', + 'LIB_MOTIF_ETATDOSRSA': 'Clôture 
suite à échéance (4 mois sans droits)', + 'PLT_DT_DEB_AFF': '2021-10-15', + 'PLT_DT_FIN_AFF': '2022-06-13', + 'PLT_MOTIF_FIN_ACC': None, + 'PLT_COMMENTAIRE_REF': None, + 'PLT_NUM_CI': None, + 'PLT_PLATEFORME_CI': None, + 'PLT_OPERATEUR_CI': None, + 'PLT_REFERENT_CI': ' ', + 'PLT_DECISION_CI': None, + 'PLT_DUREE_CI': None, + 'PLT_DATE_DEB_CI': None, + 'PLT_DATE_FIN_CI': None, + 'NOUVEAU_DT_DEB_AFF': '2022-06-13', + 'NOUVEAU_AFF': 'SORTIE', + 'NOUVEAU_COMMENTAIRE_PI': None, + 'NOUVEAU_NUM_CI': '16', + 'NOUVEAU_PLATEFORME_CI': 'CCO CANTINI', + 'NOUVEAU_OPERATEUR_CI': 'CCO', + 'NOUVEAU_REFERENT_CI': 'O BOBEUF', + 'NOUVEAU_DECISION_CI': 'Validé', + 'NOUVEAU_DUREE_CI': '4', + 'NOUVEAU_DATE_DEB_CI': '2021-10-18', + 'NOUVEAU_DATE_FIN_CI': '2022-02-18', }, ], }, @@ -1274,23 +1274,23 @@ CODE_PER 'err': 0, 'data': [ { - "id": "A1", - "text": "A1 - DAIE 13", - "description": "DISPOSITIF d'Accompagnement et d'Insertion par l'Emploi.\n", + 'id': 'A1', + 'text': 'A1 - DAIE 13', + 'description': "DISPOSITIF d'Accompagnement et d'Insertion par l'Emploi.\n", }, { - "id": "A10", - "text": "A10 - Accompagnement Global", - "description": ( + 'id': 'A10', + 'text': 'A10 - Accompagnement Global', + 'description': ( 'Accompagnement de Pôle Emploi qui permet la prise en charge simultanée de ' 'problématiques sociales et professionnelles, par l’intervention conjointe ' 'd’un travailleur social et d’un conseiller dédié de Pôle Emploi.' ), }, { - "id": "A11", - "text": "A11 - MODALH", - "description": ( + 'id': 'A11', + 'text': 'A11 - MODALH', + 'description': ( 'C’est un diagnostic qui évalue l’employabilité ou la nécessité ' 'd’un accès à une prestation plus adaptée (AAH) pour les bénéficiaires ' 'du RSA ayant un CER santé. La prescription est uniquement assuré par ' @@ -1298,27 +1298,27 @@ CODE_PER ), }, { - "id": "A12", - "text": "A12 - PHARE", - "description": ( + 'id': 'A12', + 'text': 'A12 - PHARE', + 'description': ( 'C’est un accompagnement pour le retour à l’emploi des bénéficiaires ' 'du RSA reconnus travailleur handicapé. Il se formalise par la réalisation d’un CER.' 
), }, - {"id": "A13", "text": "A13 - CAP Emploi", "description": None}, + {'id': 'A13', 'text': 'A13 - CAP Emploi', 'description': None}, { - "id": "A14", - "text": "A14 - Accompagnement Global spécialisé", - "description": ( + 'id': 'A14', + 'text': 'A14 - Accompagnement Global spécialisé', + 'description': ( 'A utiliser dans le cadre de la convention signée avec trois lieux d\'accueil ' 'sur l’expérimentation de l’accompagnement global spécialisé' ), }, - {"id": "A15", "text": "A15 - Boost Emploi", "description": None}, + {'id': 'A15', 'text': 'A15 - Boost Emploi', 'description': None}, { - "id": "A16", - "text": "A16 - Lieu accueil spécialisé travailleur indépendant", - "description": None, + 'id': 'A16', + 'text': 'A16 - Lieu accueil spécialisé travailleur indépendant', + 'description': None, }, ], }, diff --git a/tests/test_sector.py b/tests/test_sector.py index fee04f86..635a23c9 100644 --- a/tests/test_sector.py +++ b/tests/test_sector.py @@ -329,7 +329,7 @@ def test_sector_endpoint_update(app, sector): result = app.put(url, params=CSV_MISSING_COLUMN, headers={'Content-Type': 'text/csv'}, status=400).json assert result['err'] == 1 - assert "missing column" in result['err_desc'] + assert 'missing column' in result['err_desc'] result = app.put(url, params=CSV_REORDERED, headers={}, status=400).json assert result['err'] == 1 diff --git a/tests/test_sigerly.py b/tests/test_sigerly.py index 93d973c9..34fa1c41 100644 --- a/tests/test_sigerly.py +++ b/tests/test_sigerly.py @@ -34,7 +34,7 @@ def connector(db): def json_get_data(filename): - with open(os.path.join(TEST_BASE_DIR, "%s.json" % filename)) as fd: + with open(os.path.join(TEST_BASE_DIR, '%s.json' % filename)) as fd: return json.dumps(json.load(fd)) diff --git a/tests/test_signal_arretes.py b/tests/test_signal_arretes.py index 456301df..f949fd0c 100644 --- a/tests/test_signal_arretes.py +++ b/tests/test_signal_arretes.py @@ -92,7 +92,7 @@ def mock_get_document_demande(url, request): if url.path.endswith('unavailable'): return response( 200, - json.dumps({'GetDocumentDemandeResult': json.dumps("fichier indisponible")}), + json.dumps({'GetDocumentDemandeResult': json.dumps('fichier indisponible')}), ) if url.path.endswith('corrupted'): diff --git a/tests/test_sivin.py b/tests/test_sivin.py index 99eab10e..391ab1f7 100644 --- a/tests/test_sivin.py +++ b/tests/test_sivin.py @@ -53,14 +53,14 @@ EURO5_INDEXES = [ ] VEHICLE_DETAILS = { - "carrosserie": "BERLINE", - "clEnvironPrf": "70/220 2001/100EURO3", - "codifVin": "VF7FCKFVB26857835", - "genreVCG": "VP", - "immatSiv": "FS032GM", - "genreVPrf": "VP", - "date1erCir": "2003-11-21", - "nSiren": "000000000", + 'carrosserie': 'BERLINE', + 'clEnvironPrf': '70/220 2001/100EURO3', + 'codifVin': 'VF7FCKFVB26857835', + 'genreVCG': 'VP', + 'immatSiv': 'FS032GM', + 'genreVPrf': 'VP', + 'date1erCir': '2003-11-21', + 'nSiren': '000000000', } VEHICLE_THEORICAL_FINITION = { diff --git a/tests/test_sms.py b/tests/test_sms.py index ab94473c..87025767 100644 --- a/tests/test_sms.py +++ b/tests/test_sms.py @@ -796,13 +796,13 @@ def test_sms_factor_alert_emails(app, freezer, mailoutbox): ) freezer.move_to('2019-01-01 00:00:00') - resp = {'credits': "101"} + resp = {'credits': '101'} url = connector.URL with tests.utils.mock_url(url, resp, 200): connector.check_status() assert len(mailoutbox) == 0 - resp = {'credits': "99"} + resp = {'credits': '99'} url = connector.URL with tests.utils.mock_url(url, resp, 200): connector.check_status() @@ -812,7 +812,7 @@ def test_sms_factor_alert_emails(app, freezer, 
mailoutbox): assert mail.recipients() == ['test@entrouvert.org'] assert mail.subject == 'SMS Factor alert: only 99 credits left' for body in (mail.body, mail.alternatives[0][0]): - assert "SMS Factor" in body + assert 'SMS Factor' in body assert connector.title in body assert 'http://localhost/smsfactor/test-sms-factor/' in body mailoutbox.clear() diff --git a/tests/test_sne.py b/tests/test_sne.py index d6d5455b..a818f6ff 100644 --- a/tests/test_sne.py +++ b/tests/test_sne.py @@ -28,7 +28,7 @@ def connector(db): def setup_(rsps, settings): settings.CONNECTORS_SETTINGS = { - "sne/test": { + 'sne/test': { 'requests_substitutions': [ { 'url': 'https://sne-ws-2.site-ecole.din.developpement-durable.gouv.invalid/', diff --git a/tests/test_soap.py b/tests/test_soap.py index 38b92da3..8d48599b 100644 --- a/tests/test_soap.py +++ b/tests/test_soap.py @@ -342,10 +342,10 @@ def test_schemas(connector, soap): def test_say_hello_method_validation_error(connector, soap, app): resp = app.get('/soap/test/method/sayHello/') assert resp.json == { - "err": 1, - "err_class": "passerelle.utils.soap.SOAPValidationError", - "err_desc": soap.VALIDATION_ERROR, - "data": None, + 'err': 1, + 'err_class': 'passerelle.utils.soap.SOAPValidationError', + 'err_desc': soap.VALIDATION_ERROR, + 'data': None, } diff --git a/tests/test_solis.py b/tests/test_solis.py index 6c22fa62..80861863 100644 --- a/tests/test_solis.py +++ b/tests/test_solis.py @@ -664,12 +664,12 @@ def test_solis_apa_integration(app, solis): url = tests.utils.generic_endpoint_url('solis', 'apa-integration', slug=solis.slug) demande = { - "beneficiaire_demande_aide": "APAD", - "beneficiaire_demande_dateDepot": "2018-02-09", - "beneficiaire_etatCivil_civilite": "M", - "beneficiaire_etatCivil_contact_courriel": "benef@yopmail.com", - "conjoint_nom": "Conjnom", - "conjoint_prenom": "Conjprenom", + 'beneficiaire_demande_aide': 'APAD', + 'beneficiaire_demande_dateDepot': '2018-02-09', + 'beneficiaire_etatCivil_civilite': 'M', + 'beneficiaire_etatCivil_contact_courriel': 'benef@yopmail.com', + 'conjoint_nom': 'Conjnom', + 'conjoint_prenom': 'Conjprenom', } resp = app.post_json(url, params=demande, status=200) diff --git a/tests/test_solis_afi_mss.py b/tests/test_solis_afi_mss.py index 36faccd1..52afbe86 100644 --- a/tests/test_solis_afi_mss.py +++ b/tests/test_solis_afi_mss.py @@ -44,12 +44,12 @@ TEST_BASE_DIR = os.path.join(os.path.dirname(__file__), 'data', 'solis_afi_mss') def json_get_data(filename): - with open(os.path.join(TEST_BASE_DIR, "%s.json" % filename)) as fd: + with open(os.path.join(TEST_BASE_DIR, '%s.json' % filename)) as fd: return json.dumps(json.load(fd)) def get_media_file(filename): - with open(os.path.join(TEST_BASE_DIR, "%s" % filename), 'rb') as desc: + with open(os.path.join(TEST_BASE_DIR, '%s' % filename), 'rb') as desc: return desc.read() @@ -67,8 +67,8 @@ RECHERCHE_PAR_EMAIL_NONE = response(200, json_get_data('rechercherParEmail_none' GET_IMPOSITION_PAR_AGENT_4 = response(200, json_get_data('getImpositionsParAgent_388405')) GET_IMPOSITION_4 = response(200, json_get_data('getImposition_388405_2019')) -GET_IMPOSITION_NONE = response(200, "") -GET_IMPOSITION_204 = response(204, "") +GET_IMPOSITION_NONE = response(200, '') +GET_IMPOSITION_204 = response(204, '') DECLARER_IMPOT_1 = response(200, json_get_data('declarerImpot_389227')) DECLARER_IMPOT_500 = response(500, json_get_data('declarerImpot_error')) @@ -286,10 +286,10 @@ def test_agent_contacts(mocked_get, app, connector): resp = app.get(endpoint) assert not resp.json['err'] assert 
resp.json['data']['id'] == 389227 - assert resp.json['data']['text'] == "Jacques ROUSSEAU" + assert resp.json['data']['text'] == 'Jacques ROUSSEAU' assert resp.json['data']['adresse']['codePostal'] == 75014 - assert resp.json['data']['coordonnees']['numeroPortable'] == "0688888888" - assert resp.json['data']['coordonnees']['adresseMailPerso'] == "jr@example.org" + assert resp.json['data']['coordonnees']['numeroPortable'] == '0688888888' + assert resp.json['data']['coordonnees']['adresseMailPerso'] == 'jr@example.org' @mock.patch('passerelle.utils.Request.get') @@ -847,8 +847,8 @@ def test_add_document_wrong_code_ged(mocked_post, mocked_get, app, connector): 500, json.dumps( { - "logref": "2ee75216-7bec-4040-9d91-c67565bc6f88", - "message": "Erreur non g\u00e9r\u00e9e par une application cliente: Le document n'existe pas", + 'logref': '2ee75216-7bec-4040-9d91-c67565bc6f88', + 'message': "Erreur non g\u00e9r\u00e9e par une application cliente: Le document n'existe pas", } ), ) diff --git a/tests/test_solis_apa.py b/tests/test_solis_apa.py index 515fab90..cb9ba287 100644 --- a/tests/test_solis_apa.py +++ b/tests/test_solis_apa.py @@ -54,7 +54,7 @@ def test_suivi_error(mocked_post, setup, app): mocked_post.return_value = mock.Mock(status_code=500) resp = app.get(reverse('solis-apa-suivi', kwargs={'slug': 'test', 'suivi_type': 'visite'})) - assert resp.json['err_desc'] == "suivi visite ws: error code 500" + assert resp.json['err_desc'] == 'suivi visite ws: error code 500' assert resp.json['err'] == 1 @@ -106,7 +106,7 @@ def test_integration_demande_apa_etablissement(mocked_post, setup, app, url): def test_integration_error(mocked_post, setup, app, url): mocked_post.return_value = mock.Mock(status_code=500) resp = app.post_json(url('integration'), params=json_get_data('premiere_demande_apa_etablissement.json')) - assert resp.json['err_desc'] == "integration ws: error code 500" + assert resp.json['err_desc'] == 'integration ws: error code 500' assert resp.json['err'] == 1 diff --git a/tests/test_tcl.py b/tests/test_tcl.py index fa11c819..c3fccbb4 100644 --- a/tests/test_tcl.py +++ b/tests/test_tcl.py @@ -8,101 +8,101 @@ import tests.utils from passerelle.contrib.tcl.models import Line, Stop, Tcl LIGNE_BUS = { - "values": [ + 'values': [ { - "last_update_fme": "2017-06-27 06:01:10", - "nom_trace": "Croix Rousse - Plateaux de St Rambert", - "last_update": "None", - "code_trace": "2Aa1", - "gid": "1003", - "ligne": "2", - "sens": "Aller", + 'last_update_fme': '2017-06-27 06:01:10', + 'nom_trace': 'Croix Rousse - Plateaux de St Rambert', + 'last_update': 'None', + 'code_trace': '2Aa1', + 'gid': '1003', + 'ligne': '2', + 'sens': 'Aller', }, ] } LIGNE_TRAM = { - "values": [ + 'values': [ { - "ligne": "T2", - "sens": "Retour", - "libelle": "St Priest Bel Air - Perrache", - "indice": "", - "ut": "UTT", - "couleur": "54 0 160", - "infos": "", - "gid": "4", - "last_update_fme": "2017-06-27 06:01:10", - "code_titan": "T2r2", - "last_update": "None", + 'ligne': 'T2', + 'sens': 'Retour', + 'libelle': 'St Priest Bel Air - Perrache', + 'indice': '', + 'ut': 'UTT', + 'couleur': '54 0 160', + 'infos': '', + 'gid': '4', + 'last_update_fme': '2017-06-27 06:01:10', + 'code_titan': 'T2r2', + 'last_update': 'None', }, ] } LIGNE_MF = { - "values": [ + 'values': [ { - "last_update_fme": "2017-06-27 06:01:10", - "ligne": "B", - "ut": "UTMA", - "indice": "", - "couleur": "0 170 227", - "libelle": "Charpennes - Oullins Gare", - "last_update": "None", - "gid": "17", - "code_titan": "302Aa1", - "infos": "", - "sens": 
"Aller", + 'last_update_fme': '2017-06-27 06:01:10', + 'ligne': 'B', + 'ut': 'UTMA', + 'indice': '', + 'couleur': '0 170 227', + 'libelle': 'Charpennes - Oullins Gare', + 'last_update': 'None', + 'gid': '17', + 'code_titan': '302Aa1', + 'infos': '', + 'sens': 'Aller', }, ] } ARRETS = { - "type": "FeatureCollection", - "features": [ + 'type': 'FeatureCollection', + 'features': [ { - "geometry": {"coordinates": [4.84756760746877, 45.7594333137236], "type": "Point"}, - "type": "Feature", - "properties": { - "pmr": "t", - "id": "46026", - "nom": "Place Guichard", - "last_update_fme": "2017-07-10 06:00:28", - "last_update": "", - "gid": "92", - "desserte": "302A:R", - "ascenseur": "f", - "escalator": "f", + 'geometry': {'coordinates': [4.84756760746877, 45.7594333137236], 'type': 'Point'}, + 'type': 'Feature', + 'properties': { + 'pmr': 't', + 'id': '46026', + 'nom': 'Place Guichard', + 'last_update_fme': '2017-07-10 06:00:28', + 'last_update': '', + 'gid': '92', + 'desserte': '302A:R', + 'ascenseur': 'f', + 'escalator': 'f', }, }, ], } PASSAGES = { - "values": [ + 'values': [ { - "last_update_fme": "2017-06-27 07:32:27", - "coursetheorique": "302A-019AT:53:1:14", - "ligne": "302A", - "direction": "Gare d'Oullins", - "gid": "12429", - "idtarretdestination": "46035", - "heurepassage": "2017-06-27 07:33:50", - "delaipassage": "1 min", - "id": "46026", - "type": "E", + 'last_update_fme': '2017-06-27 07:32:27', + 'coursetheorique': '302A-019AT:53:1:14', + 'ligne': '302A', + 'direction': "Gare d'Oullins", + 'gid': '12429', + 'idtarretdestination': '46035', + 'heurepassage': '2017-06-27 07:33:50', + 'delaipassage': '1 min', + 'id': '46026', + 'type': 'E', }, { - "gid": "12430", - "direction": "Gare d'Oullins", - "idtarretdestination": "46035", - "ligne": "302A", - "last_update_fme": "2017-06-27 07:32:27", - "coursetheorique": "302A-019AT:61:1:3", - "id": "46026", - "delaipassage": "4 min", - "type": "E", - "heurepassage": "2017-06-27 07:36:55", + 'gid': '12430', + 'direction': "Gare d'Oullins", + 'idtarretdestination': '46035', + 'ligne': '302A', + 'last_update_fme': '2017-06-27 07:32:27', + 'coursetheorique': '302A-019AT:61:1:3', + 'id': '46026', + 'delaipassage': '4 min', + 'type': 'E', + 'heurepassage': '2017-06-27 07:36:55', }, ] } diff --git a/tests/test_teamnet_axel.py b/tests/test_teamnet_axel.py index 5bc678c2..e635a27e 100644 --- a/tests/test_teamnet_axel.py +++ b/tests/test_teamnet_axel.py @@ -16,13 +16,13 @@ def setup(db): 'slug': 'test', 'wsdl_url': 'http://example.net/AXEL_WS/AxelWS.php?wsdl', 'billing_regies': { - "11": "EN2-CLASSE", - "27": "EN10-FM", - "37": "EN3-DONS", - "31": "EN31-C.V.", - "42": "EN29-RESTC", - "43": "EN32-ENFAN", - "38": "EN30-PRODD", + '11': 'EN2-CLASSE', + '27': 'EN10-FM', + '37': 'EN3-DONS', + '31': 'EN31-C.V.', + '42': 'EN29-RESTC', + '43': 'EN32-ENFAN', + '38': 'EN30-PRODD', }, }, ) diff --git a/tests/test_templatetags.py b/tests/test_templatetags.py index 62181fba..609a4a93 100644 --- a/tests/test_templatetags.py +++ b/tests/test_templatetags.py @@ -66,7 +66,7 @@ def test_render_json_schema(): def test_render_enum_schema(): assert ( - str(render_json_schema({'enum': [1, "aaa", [1]]})) + str(render_json_schema({'enum': [1, 'aaa', [1]]})) == '1 | "aaa" | [1]' ) diff --git a/tests/test_toulouse_axel.py b/tests/test_toulouse_axel.py index 7e510ef0..e68620b5 100644 --- a/tests/test_toulouse_axel.py +++ b/tests/test_toulouse_axel.py @@ -178,7 +178,7 @@ def annual_booking_params(): def test_lock(app, resource): resp = 
app.get('/toulouse-axel/test/lock?key=&locker=', status=400) - assert resp.json['err_desc'] == "key is empty" + assert resp.json['err_desc'] == 'key is empty' assert resp.json['err'] == 'bad-request' assert Lock.objects.count() == 0 @@ -527,7 +527,7 @@ def test_management_dates_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_date_gestion_dui') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/management_dates') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' assert resp.json['data'] == {'xml_request': None, 'xml_response': None} @@ -578,7 +578,7 @@ def test_management_dates_endpoint(app, resource): def test_link_endpoint_nameid_empty(app, resource, link_params): resp = app.post_json('/toulouse-axel/test/link?NameID=', params=link_params, status=400) - assert resp.json['err_desc'] == "NameID is empty" + assert resp.json['err_desc'] == 'NameID is empty' assert resp.json['err'] == 'bad-request' @@ -586,7 +586,7 @@ def test_link_endpoint_axel_error(app, resource, link_params): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_verif_dui') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' assert resp.json['data'] == {'xml_request': None, 'xml_response': None} @@ -606,7 +606,7 @@ def test_link_endpoint_axel_error(app, resource, link_params): with mock.patch('xmlschema.XMLSchema.validate') as xml_validate: xml_validate.side_effect = xmlschema.XMLSchemaValidationError(None, None) resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'].startswith("Axel error: invalid request") + assert resp.json['err_desc'].startswith('Axel error: invalid request') assert resp.json['err'] == 'error' assert resp.json['data']['xml_request'] == xml_request assert resp.json['data']['xml_response'] is None @@ -667,7 +667,7 @@ def test_link_endpoint_axel_error(app, resource, link_params): with mock.patch('passerelle.contrib.utils.axel.AxelSchema.decode') as decode: decode.side_effect = xmlschema.XMLSchemaValidationError(None, None) resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'].startswith("Axel error: invalid response") + assert resp.json['err_desc'].startswith('Axel error: invalid response') assert resp.json['err'] == 'error' assert resp.json['data']['xml_request'] == xml_request assert resp.json['data']['xml_response'] == xml_response @@ -675,7 +675,7 @@ def test_link_endpoint_axel_error(app, resource, link_params): with mock.patch('passerelle.contrib.toulouse_axel.models.ToulouseAxel.soap_client') as client: client.side_effect = SOAPError('SOAP service is down') resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "SOAP service is down" + assert resp.json['err_desc'] == 'SOAP service is down' @pytest.mark.parametrize( @@ -697,7 +697,7 @@ def test_link_endpoint_no_result(app, resource, link_params, xml_response): ) with mock_getdata(content, 'RefVerifDui'): resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "Person not found" + assert 
resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -713,7 +713,7 @@ def test_link_endpoint_conflict(app, resource, link_params): link = Link.objects.create(resource=resource, name_id='yyy', dui='YYY', person_id='42') with mock_getdata(content, 'RefVerifDui'): resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "Data conflict" + assert resp.json['err_desc'] == 'Data conflict' assert resp.json['err'] == 'conflict' # existing link but person_id is wrong @@ -722,7 +722,7 @@ def test_link_endpoint_conflict(app, resource, link_params): link.save() with mock_getdata(content, 'RefVerifDui'): resp = app.post_json('/toulouse-axel/test/link?NameID=yyy', params=link_params) - assert resp.json['err_desc'] == "Data conflict" + assert resp.json['err_desc'] == 'Data conflict' assert resp.json['err'] == 'conflict' @@ -760,7 +760,7 @@ def test_link_endpoint(app, resource, link_params, code): def test_unlink_endpoint_no_result(app, resource): resp = app.post('/toulouse-axel/test/unlink?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -779,7 +779,7 @@ def test_active_dui_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_famille_dui') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/active_dui?NameID=yyy') - assert resp.json['err_desc'] == "No family info" + assert resp.json['err_desc'] == 'No family info' assert resp.json['err'] == 'no-family-info' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/family_info.xml') @@ -789,13 +789,13 @@ def test_active_dui_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_verif_dui') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/active_dui?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_active_dui_endpoint_no_result(app, resource, family_data): resp = app.get('/toulouse-axel/test/active_dui?NameID=yyy') - assert resp.json['err_desc'] == "Unknown NameID" + assert resp.json['err_desc'] == 'Unknown NameID' assert resp.json['err'] == 'unknown' @@ -806,7 +806,7 @@ def test_active_dui_endpoint_wrong_rl(app, resource): content = xml.read() with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/active_dui?NameID=yyy') - assert resp.json['err_desc'] == "No corresponding RL" + assert resp.json['err_desc'] == 'No corresponding RL' assert resp.json['err'] == 'no-rl' @@ -833,7 +833,7 @@ def test_active_dui_endpoint_wrong_dui_code(app, resource, family_data, xml_resp ): with mock_getdata(content, 'RefVerifDui'): resp = app.get('/toulouse-axel/test/active_dui?NameID=yyy') - assert resp.json['err_desc'] == "Wrong DUI status" + assert resp.json['err_desc'] == 'Wrong DUI status' assert resp.json['err'] == 'dui-code-error-%s' % code @@ -875,7 +875,7 @@ def test_active_dui_endpoint(app, resource, family_data, code): def test_referential_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/referential/foo/') - assert resp.json['err_desc'] == "Referential not found" + assert resp.json['err_desc'] == 'Referential not found' assert resp.json['err'] == 'not-found' @@ -900,7 +900,7 @@ def test_family_info_endpoint_axel_error(app, 
resource, family_data): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_famille_dui') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/family_info.xml') @@ -912,12 +912,12 @@ def test_family_info_endpoint_axel_error(app, resource, family_data): management_dates.side_effect = APIError('Axel error: FooBar') with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' def test_family_info_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') @@ -926,7 +926,7 @@ def test_family_info_endpoint_no_result(app, resource): json_response={'DATA': {'PORTAIL': None}}, xml_request='', xml_response='' ) resp = app.get('/toulouse-axel/test/family_info?NameID=yyy') - assert resp.json['err_desc'] == "Family not found" + assert resp.json['err_desc'] == 'Family not found' assert resp.json['err'] == 'not-found' @@ -1057,13 +1057,13 @@ def test_children_info_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_famille_dui') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/children_info?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_children_info_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/children_info?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -1093,13 +1093,13 @@ def test_child_info_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_famille_dui') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/child_info?NameID=yyy&idpersonne=zzz') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_child_info_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/child_info?NameID=yyy&idpersonne=zzz') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') @@ -1108,7 +1108,7 @@ def test_child_info_endpoint_no_result(app, resource): content = xml.read() with mock_getdata(content, 'RefFamilleDui'): resp = app.get('/toulouse-axel/test/child_info?NameID=yyy&idpersonne=zzz') - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -1176,13 +1176,13 @@ def test_child_contacts_info_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_famille_dui') as operation: 
         operation.side_effect = AxelError('FooBar')
         resp = app.get('/toulouse-axel/test/child_contacts_info?NameID=yyy&idpersonne=zzz')
-    assert resp.json['err_desc'] == "Axel error: FooBar"
+    assert resp.json['err_desc'] == 'Axel error: FooBar'
     assert resp.json['err'] == 'error'


 def test_child_contacts_info_endpoint_no_result(app, resource):
     resp = app.get('/toulouse-axel/test/child_contacts_info?NameID=yyy&idpersonne=zzz')
-    assert resp.json['err_desc'] == "Person not found"
+    assert resp.json['err_desc'] == 'Person not found'
     assert resp.json['err'] == 'not-found'

     Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42')
@@ -1191,7 +1191,7 @@ def test_child_contacts_info_endpoint_no_result(app, resource):
         content = xml.read()
     with mock_getdata(content, 'RefFamilleDui'):
         resp = app.get('/toulouse-axel/test/child_contacts_info?NameID=yyy&idpersonne=zzz')
-    assert resp.json['err_desc'] == "Child not found"
+    assert resp.json['err_desc'] == 'Child not found'
     assert resp.json['err'] == 'not-found'


@@ -1240,21 +1240,21 @@ def test_update_family_info_endpoint_axel_error(app, resource, update_params, fa
         'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data
     ):
         resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=update_params)
-    assert resp.json['err_desc'] == "Axel error: FooBar"
+    assert resp.json['err_desc'] == 'Axel error: FooBar'
     assert resp.json['err'] == 'error'
     assert 'error_post_data' in resp.json['data']


 def test_update_family_info_endpoint_no_result(app, resource, update_params):
     resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=update_params)
-    assert resp.json['err_desc'] == "Person not found"
+    assert resp.json['err_desc'] == 'Person not found'
     assert resp.json['err'] == 'not-found'


 def test_update_family_info_endpoint(app, resource, update_params, family_data):
     link = Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42')
-    content = ""
+    content = ''
     with mock_getdata(content, 'FormMajFamilleDui'):
         with mock.patch(
             'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data
@@ -1296,7 +1296,7 @@ def test_update_family_info_endpoint(app, resource, update_params, family_data):

 def test_update_family_info_flat_endpoint(app, resource, flat_update_params, family_data):
     Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42')
-    content = ""
+    content = ''
     with mock_getdata(content, 'FormMajFamilleDui'):
         with mock.patch(
             'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_family_data', return_value=family_data
@@ -1512,7 +1512,7 @@ def test_pre_sanitize_update_family_data_enfant_n(app, resource, update_params):
     resource.pre_sanitize_update_family_data(post_data=partial_update_params)
     jsonschema.validate(partial_update_params, json_schema)
     assert len(partial_update_params['ENFANT']) == 1
-    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535"
+    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535'

     partial_update_params = copy.deepcopy(update_params)
     partial_update_params['maj:enfant_1'] = False
@@ -1523,7 +1523,7 @@ def test_pre_sanitize_update_family_data_enfant_n(app, resource, update_params):
     resource.pre_sanitize_update_family_data(post_data=partial_update_params)
     jsonschema.validate(partial_update_params, json_schema)
     assert len(partial_update_params['ENFANT']) == 1
-    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "4242"
+    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '4242'

     # do not fill IDPERSONNE for the removed child
     partial_update_params = copy.deepcopy(update_params)
@@ -1533,7 +1533,7 @@ def test_pre_sanitize_update_family_data_enfant_n(app, resource, update_params):
     resource.pre_sanitize_update_family_data(post_data=partial_update_params)
     jsonschema.validate(partial_update_params, json_schema)
     assert len(partial_update_params['ENFANT']) == 1
-    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "4242"
+    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '4242'

     # remove all children
     partial_update_params = copy.deepcopy(update_params)
@@ -1572,7 +1572,7 @@ def test_pre_sanitize_update_family_data_enfant_n_assurance(app, resource, updat
     resource.pre_sanitize_update_family_data(post_data=partial_update_params)
     jsonschema.validate(partial_update_params, json_schema)
     assert len(partial_update_params['ENFANT']) == 1
-    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535"
+    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535'
     assert 'ASSURANCE' not in partial_update_params['ENFANT'][0]
     # combine with maj:enfant_n
     partial_update_params = copy.deepcopy(update_params)
@@ -1608,7 +1608,7 @@ def test_pre_sanitize_update_family_data_enfant_n_contact(app, resource, update_
     resource.pre_sanitize_update_family_data(post_data=partial_update_params)
     jsonschema.validate(partial_update_params, json_schema)
     assert len(partial_update_params['ENFANT']) == 1
-    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "4242"
+    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '4242'
     assert 'CONTACT' not in partial_update_params['ENFANT'][0]
     # combine with maj:enfant_n
     partial_update_params = copy.deepcopy(update_params)
@@ -1651,7 +1651,7 @@ def test_pre_sanitize_update_family_data_enfant_n_sanitaire(app, resource, updat
     resource.pre_sanitize_update_family_data(post_data=partial_update_params)
     jsonschema.validate(partial_update_params, json_schema)
     assert len(partial_update_params['ENFANT']) == 1
-    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535"
+    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535'
     assert 'SANITAIRE' not in partial_update_params['ENFANT'][0]
     # combine with maj:enfant_n
     partial_update_params = copy.deepcopy(update_params)
@@ -1693,7 +1693,7 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie(app, resource,
     ):
         resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params)
     assert len(partial_update_params['ENFANT']) == 1
-    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "4242"
+    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '4242'
     assert 'ALLERGIE' in partial_update_params['ENFANT'][0]['SANITAIRE']
     # fields were set with origin values found in Axel
     new_values = partial_update_params['ENFANT'][0]['SANITAIRE']['ALLERGIE']
@@ -1711,7 +1711,7 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_allergie(app, resource,
     ):
         resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params)
     assert len(partial_update_params['ENFANT']) == 1
-    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535"
+    assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535'
     assert 'SANITAIRE' not in partial_update_params['ENFANT'][0]
     # combine with maj:enfant_n
     partial_update_params = copy.deepcopy(update_params)
@@ -1833,7 +1833,7 @@ def test_pre_sanitize_update_family_data_enfant_n_sanitaire_medecin(app, resourc
resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) assert len(partial_update_params['ENFANT']) == 1 - assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535" + assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535' assert 'MEDECIN' not in partial_update_params['ENFANT'][0]['SANITAIRE'] # combine with maj:enfant_n_sanitaire partial_update_params = copy.deepcopy(update_params) @@ -1843,7 +1843,7 @@ def test_pre_sanitize_update_family_data_enfant_n_sanitaire_medecin(app, resourc resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) assert len(partial_update_params['ENFANT']) == 1 - assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535" + assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535' assert 'SANITAIRE' not in partial_update_params['ENFANT'][0] # combine with maj:enfant_n partial_update_params = copy.deepcopy(update_params) @@ -1880,7 +1880,7 @@ def test_pre_sanitize_update_family_data_enfant_n_sanitaire_vaccin(app, resource resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) assert len(partial_update_params['ENFANT']) == 1 - assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535" + assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535' assert 'VACCIN' not in partial_update_params['ENFANT'][0]['SANITAIRE'] # combine with maj:enfant_n_sanitaire partial_update_params = copy.deepcopy(update_params) @@ -1890,7 +1890,7 @@ def test_pre_sanitize_update_family_data_enfant_n_sanitaire_vaccin(app, resource resource.pre_sanitize_update_family_data(post_data=partial_update_params) jsonschema.validate(partial_update_params, json_schema) assert len(partial_update_params['ENFANT']) == 1 - assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535" + assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535' assert 'SANITAIRE' not in partial_update_params['ENFANT'][0] # combine with maj:enfant_n partial_update_params = copy.deepcopy(update_params) @@ -1941,7 +1941,7 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_handicap(app, resource, ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert len(partial_update_params['ENFANT']) == 1 - assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "4242" + assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '4242' # fields were set with origin values found in Axel new_values = partial_update_params['ENFANT'][0]['SANITAIRE'] for key in handicap_fields: @@ -1960,7 +1960,7 @@ def test_sanitize_update_family_data_enfant_n_sanitaire_handicap(app, resource, ): resource.sanitize_update_family_data(dui='XXX', post_data=partial_update_params) assert len(partial_update_params['ENFANT']) == 1 - assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == "3535" + assert partial_update_params['ENFANT'][0]['IDPERSONNE'] == '3535' assert 'SANITAIRE' not in partial_update_params['ENFANT'][0] # combine with maj:enfant_n partial_update_params = copy.deepcopy(update_params) @@ -2038,7 +2038,7 @@ def test_update_family_info_endpoint_sanitize_axel_error(app, resource, update_p ) as sanitize: sanitize.side_effect = APIError('Axel error: FooBar') resp = app.post_json('/toulouse-axel/test/update_family_info?NameID=yyy', params=update_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert 
resp.json['err_desc'] == 'Axel error: FooBar' def test_invoices_endpoint_axel_error(app, resource): @@ -2046,7 +2046,7 @@ def test_invoices_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_facture_a_payer') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoices?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoices?NameID=', status=400) @@ -2056,7 +2056,7 @@ def test_invoices_endpoint_axel_error(app, resource): def test_invoices_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoices?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -2184,13 +2184,13 @@ def test_invoices_history_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.list_dui_factures') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoices/history?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_invoices_history_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoices/history?NameID=yyy') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -2278,7 +2278,7 @@ def test_invoice_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_facture_a_payer') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42?NameID=yyy') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -2289,11 +2289,11 @@ def test_invoice_endpoint_no_result(app, resource): content = xml.read() with mock_getdata(content, 'RefFactureAPayer'): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-35?NameID=yyy') - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_getdata(content, 'RefFactureAPayer'): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-44?NameID=yyy') - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' @@ -2446,7 +2446,7 @@ def test_invoice_pdf_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_facture_a_payer') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/invoices.xml') @@ -2456,13 +2456,13 @@ def test_invoice_pdf_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.ref_facture_pdf') as operation: operation.side_effect = AxelError('FooBar') resp = 
app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_invoice_pdf_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') @@ -2471,17 +2471,17 @@ def test_invoice_pdf_endpoint_no_result(app, resource): content = xml.read() with mock_getdata(content, 'RefFactureAPayer'): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-35/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_getdata(content, 'RefFactureAPayer'): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-44/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_getdata(content, 'RefFactureAPayer'): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-43/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "PDF not available" + assert resp.json['err_desc'] == 'PDF not available' assert resp.json['err'] == 'not-available' pdf_content = ''' @@ -2491,7 +2491,7 @@ def test_invoice_pdf_endpoint_no_result(app, resource): invoice.return_value = {'has_pdf': True, 'display_id': '42'} with mock_getdata(pdf_content, 'RefFacturePDF'): resp = app.get('/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pdf?NameID=yyy', status=404) - assert resp.json['err_desc'] == "PDF error" + assert resp.json['err_desc'] == 'PDF error' assert resp.json['err'] == 'error' @@ -2525,7 +2525,7 @@ def test_pay_invoice_endpoint_axel_error(app, resource): resp = app.post_json( '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/invoices.xml') @@ -2537,7 +2537,7 @@ def test_pay_invoice_endpoint_axel_error(app, resource): resp = app.post_json( '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-42/pay?NameID=yyy', params=payload ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -2553,13 +2553,13 @@ def test_pay_invoice_endpoint_no_result(app, resource): resp = app.post_json( '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-35/pay?NameID=yyy', params=payload ) - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' with mock_getdata(content, 'RefFactureAPayer'): resp = app.post_json( '/toulouse-axel/test/regie/MAREGIE/invoice/XXX-44/pay?NameID=yyy', params=payload ) - assert resp.json['err_desc'] == "Invoice not found" + assert resp.json['err_desc'] == 'Invoice not found' assert resp.json['err'] == 'not-found' @@ -2599,7 +2599,7 @@ def test_clae_years_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.enfants_activites') as 
operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/clae_years?NameID=yyy&pivot_date=06-15') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -2607,13 +2607,13 @@ def test_clae_years_endpoint_axel_error(app, resource): def test_clae_years_endpoint_bad_date_format(app, resource, value): Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') resp = app.get('/toulouse-axel/test/clae_years?NameID=yyy&pivot_date=%s' % value, status=400) - assert resp.json['err_desc'] == "bad date format, should be MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be MM-DD' assert resp.json['err'] == 'bad-request' def test_clae_years_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/clae_years?NameID=yyy&pivot_date=06-15') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -2667,7 +2667,7 @@ def test_clae_children_activities_info_endpoint_axel_error(app, resource): with mock.patch('passerelle.contrib.toulouse_axel.schemas.enfants_activites') as operation: operation.side_effect = AxelError('FooBar') resp = app.get('/toulouse-axel/test/clae_children_activities_info?NameID=yyy&booking_date=2020-01-20') - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -2677,13 +2677,13 @@ def test_clae_children_activities_info_endpoint_bad_date_format(app, resource, v resp = app.get( '/toulouse-axel/test/clae_children_activities_info?NameID=yyy&booking_date=%s' % value, status=400 ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' def test_clae_children_activities_info_endpoint_no_result(app, resource): resp = app.get('/toulouse-axel/test/clae_children_activities_info?NameID=yyy&booking_date=2020-01-20') - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -2819,7 +2819,7 @@ def test_clae_booking_activities_info_endpoint_axel_error(app, resource): resp = app.get( '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/child_activities.xml') @@ -2831,7 +2831,7 @@ def test_clae_booking_activities_info_endpoint_axel_error(app, resource): resp = app.get( '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -2843,14 +2843,14 @@ def test_clae_booking_activities_info_endpoint_bad_date_format(app, resource, va % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' resp = app.get( 
'/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=%s' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -2858,7 +2858,7 @@ def test_clae_booking_activities_info_endpoint_no_result(app, resource, child_ac resp = app.get( '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') @@ -2871,7 +2871,7 @@ def test_clae_booking_activities_info_endpoint_no_result(app, resource, child_ac resp = app.get( '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=4242&start_date=2020-01-20&end_date=2020-01-24' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' content = """ @@ -2896,7 +2896,7 @@ def test_clae_booking_activities_info_endpoint_no_result(app, resource, child_ac resp = app.get( '/toulouse-axel/test/clae_booking_activities_info?NameID=yyy&idpersonne=3535&start_date=2020-01-20&end_date=2020-01-24' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' @@ -3232,7 +3232,7 @@ def test_clae_booking_activity_possible_days_endpoint_axel_error(app, resource): '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/child_activities.xml') @@ -3245,7 +3245,7 @@ def test_clae_booking_activity_possible_days_endpoint_axel_error(app, resource): '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -3258,14 +3258,14 @@ def test_clae_booking_activity_possible_days_endpoint_bad_date_format(app, resou '&start_date=%s&end_date=2020-01-24&activity_type=MAT' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' resp = app.get( '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' '&start_date=2020-02-20&end_date=%s&activity_type=MAT' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -3277,7 +3277,7 @@ def test_clae_booking_activity_possible_days_endpoint_activity_type(app, resourc '&start_date=2020-01-20&end_date=2020-01-24&activity_type=FOO', status=400, ) - assert resp.json['err_desc'] == "bad activity_type, should be MAT, MIDI, SOIR or GARD" + assert resp.json['err_desc'] == 'bad activity_type, should be MAT, MIDI, SOIR 
or GARD' assert resp.json['err'] == 'bad-request' @@ -3287,7 +3287,7 @@ def test_clae_booking_activity_possible_days_endpoint_no_result(app, resource, c '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') @@ -3301,7 +3301,7 @@ def test_clae_booking_activity_possible_days_endpoint_no_result(app, resource, c '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=4242' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' content = """ @@ -3327,7 +3327,7 @@ def test_clae_booking_activity_possible_days_endpoint_no_result(app, resource, c '/toulouse-axel/test/clae_booking_activity_possible_days?NameID=yyy&idpersonne=3535' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' with mock.patch( @@ -3617,7 +3617,7 @@ def test_clae_booking_activity_annual_possible_days_endpoint_axel_error(app, res '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' '&activity_type=MAT&booking_date=2019-09-01' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -3630,7 +3630,7 @@ def test_clae_booking_activity_annual_possible_days_endpoint_bad_date_format(app '&activity_type=MAT&booking_date=%s' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -3642,7 +3642,7 @@ def test_clae_booking_activity_annual_possible_days_endpoint_activity_type(app, '&activity_type=FOO&booking_date=2019-09-01', status=400, ) - assert resp.json['err_desc'] == "bad activity_type, should be MAT, MIDI, SOIR or GARD" + assert resp.json['err_desc'] == 'bad activity_type, should be MAT, MIDI, SOIR or GARD' assert resp.json['err'] == 'bad-request' @@ -3652,7 +3652,7 @@ def test_clae_booking_activity_annual_possible_days_endpoint_no_result(app, reso '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=3535' '&activity_type=MAT&booking_date=2019-09-01' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') @@ -3664,7 +3664,7 @@ def test_clae_booking_activity_annual_possible_days_endpoint_no_result(app, reso '/toulouse-axel/test/clae_booking_activity_annual_possible_days?NameID=yyy&idpersonne=4242' '&activity_type=MAT&booking_date=2019-09-01' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' with mock.patch( @@ -3850,7 +3850,7 @@ def test_clae_booking_activity_prefill_endpoint_axel_error(app, resource): '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' 
'&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/child_activities.xml') @@ -3863,7 +3863,7 @@ def test_clae_booking_activity_prefill_endpoint_axel_error(app, resource): '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -3876,14 +3876,14 @@ def test_clae_booking_activity_prefill_endpoint_bad_date_format(app, resource, v '&start_date=%s&end_date=2020-01-24&activity_type=MAT' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' resp = app.get( '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' '&start_date=2020-01-20&end_date=%s&activity_type=MAT' % value, status=400, ) - assert resp.json['err_desc'] == "bad date format, should be YYYY-MM-DD" + assert resp.json['err_desc'] == 'bad date format, should be YYYY-MM-DD' assert resp.json['err'] == 'bad-request' @@ -3895,7 +3895,7 @@ def test_clae_booking_activity_prefill_endpoint_activity_type(app, resource): '&start_date=2020-01-20&end_date=2020-01-24&activity_type=FOO', status=400, ) - assert resp.json['err_desc'] == "bad activity_type, should be MAT, MIDI, SOIR or GARD" + assert resp.json['err_desc'] == 'bad activity_type, should be MAT, MIDI, SOIR or GARD' assert resp.json['err'] == 'bad-request' @@ -3905,7 +3905,7 @@ def test_clae_booking_activity_prefill_endpoint_no_result(app, resource, child_a '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' Link.objects.create(resource=resource, name_id='yyy', dui='XXX', person_id='42') @@ -3919,7 +3919,7 @@ def test_clae_booking_activity_prefill_endpoint_no_result(app, resource, child_a '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=4242' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' content = """ @@ -3945,7 +3945,7 @@ def test_clae_booking_activity_prefill_endpoint_no_result(app, resource, child_a '/toulouse-axel/test/clae_booking_activity_prefill?NameID=yyy&idpersonne=3535' '&start_date=2020-01-20&end_date=2020-01-24&activity_type=MAT' ) - assert resp.json['err_desc'] == "Child not found" + assert resp.json['err_desc'] == 'Child not found' assert resp.json['err'] == 'not-found' with mock.patch( @@ -4042,7 +4042,7 @@ def test_clae_booking_endpoint_axel_error(app, resource, booking_params): with mock.patch('passerelle.contrib.toulouse_axel.schemas.enfants_activites') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert 
resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/child_activities.xml') @@ -4052,7 +4052,7 @@ def test_clae_booking_endpoint_axel_error(app, resource, booking_params): with mock.patch('passerelle.contrib.toulouse_axel.schemas.reservation_annuelle') as operation: operation.side_effect = AxelError('FooBar') resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' @@ -4063,7 +4063,7 @@ def test_clae_booking_endpoint_date_error(app, resource, booking_params): booking_params['booking_end_date'] = '2020-01-01' with freezegun.freeze_time('2019-09-01'): resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params, status=400) - assert resp.json['err_desc'] == "booking_start_date should be before booking_end_date" + assert resp.json['err_desc'] == 'booking_start_date should be before booking_end_date' assert resp.json['err'] == 'bad-request' # today + 8j rule @@ -4072,16 +4072,16 @@ def test_clae_booking_endpoint_date_error(app, resource, booking_params): # date in the past with freezegun.freeze_time('2020-01-01'): resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params, status=400) - assert resp.json['err_desc'] == "booking_start_date min value: 2020-01-09" + assert resp.json['err_desc'] == 'booking_start_date min value: 2020-01-09' assert resp.json['err'] == 'bad-request' # too soon with freezegun.freeze_time('2019-12-25'): resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params, status=400) - assert resp.json['err_desc'] == "booking_start_date min value: 2020-01-02" + assert resp.json['err_desc'] == 'booking_start_date min value: 2020-01-02' assert resp.json['err'] == 'bad-request' with freezegun.freeze_time('2019-12-31'): resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params, status=400) - assert resp.json['err_desc'] == "booking_start_date min value: 2020-01-08" + assert resp.json['err_desc'] == 'booking_start_date min value: 2020-01-08' assert resp.json['err'] == 'bad-request' # bad reference year for end_date @@ -4089,20 +4089,20 @@ def test_clae_booking_endpoint_date_error(app, resource, booking_params): booking_params['booking_end_date'] = '2020-12-31' with freezegun.freeze_time('2019-09-01'): resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params, status=400) - assert resp.json['err_desc'] == "booking_end_date max value: 2020-07-31" + assert resp.json['err_desc'] == 'booking_end_date max value: 2020-07-31' assert resp.json['err'] == 'bad-request' booking_params['booking_start_date'] = '2021-01-01' booking_params['booking_end_date'] = '2021-12-31' with freezegun.freeze_time('2019-09-01'): resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params, status=400) - assert resp.json['err_desc'] == "booking_end_date max value: 2021-07-31" + assert resp.json['err_desc'] == 'booking_end_date max value: 2021-07-31' assert resp.json['err'] == 'bad-request' def test_clae_booking_endpoint_no_result(app, resource, booking_params): resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -4113,7 +4113,7 
@@ def test_clae_booking_endpoint(app, resource, booking_params, child_activities_d with mock.patch( 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities ): - content = "" + content = '' with mock_getdata(content, 'ReservationAnnuelle'): with mock.patch('django.core.cache.cache.delete') as mock_cache_delete: resp = app.post_json('/toulouse-axel/test/clae_booking?NameID=yyy', params=booking_params) @@ -4365,7 +4365,7 @@ def test_clae_booking_annual_endpoint_axel_error(app, resource, annual_booking_p resp = app.post_json( '/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' filepath = os.path.join(os.path.dirname(__file__), 'data/toulouse_axel/child_activities.xml') @@ -4377,13 +4377,13 @@ def test_clae_booking_annual_endpoint_axel_error(app, resource, annual_booking_p resp = app.post_json( '/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params ) - assert resp.json['err_desc'] == "Axel error: FooBar" + assert resp.json['err_desc'] == 'Axel error: FooBar' assert resp.json['err'] == 'error' def test_clae_booking_annual_endpoint_no_result(app, resource, annual_booking_params): resp = app.post_json('/toulouse-axel/test/clae_booking_annual?NameID=yyy', params=annual_booking_params) - assert resp.json['err_desc'] == "Person not found" + assert resp.json['err_desc'] == 'Person not found' assert resp.json['err'] == 'not-found' @@ -4394,7 +4394,7 @@ def test_clae_booking_annual_endpoint(app, resource, annual_booking_params, chil with mock.patch( 'passerelle.contrib.toulouse_axel.models.ToulouseAxel.get_child_activities', return_value=activities ): - content = "" + content = '' with mock_getdata(content, 'ReservationAnnuelle'): with mock.patch('django.core.cache.cache.delete') as mock_cache_delete: resp = app.post_json( diff --git a/tests/test_toulouse_foederis.py b/tests/test_toulouse_foederis.py index 292f27ce..bd0e040e 100644 --- a/tests/test_toulouse_foederis.py +++ b/tests/test_toulouse_foederis.py @@ -412,51 +412,51 @@ class TestEndpoints: assert request.headers['api-key'] == APIKEY payload = json.loads(request.body) assert payload == { - "type_de_candidature": "E", - "annonce": 524522, - "R60284409": 170013, - "firstName": "John", - "lastName": "Doe", - "sexe": "H", - "date_de_naissance": "1985-03-06", - "R1249730": 93421, - "date_fin_autorisation_de_travail": "2023-05-09", - "rqth": "N", - "date_fin_rqth": "2023-05-08", - "permis_de_conduire": "A,B", - "fimo": "O", - "Date_delivrance_fimo": "2023-05-07", - "date_fin_validite_fimo": "2023-05-08", - "R1258320": 1258319, - "collectivite_agent": "Mairie de Toulouse", - "date_debut_disponibilite": "2023-05-02", - "date_fin_disponibilite": "2023-05-01", - "pretentions_salariales": "1000", - "adresse": "12 Sesame Street", - "code_postal": "77710", - "ville": "Nemours", - "telephone": "+33 636656565", - "email": "csechet@entrouvert.com", - "date_debut_contrat": "2023-05-06", - "date_fin_contrat": "2023-05-04", - "complement_information_candidature": "I need money.", - "R1261279": 1561049, - "accord_RGPD": True, - "R1249707": [157193, 157194], - "R60845221": [5776395, 5776396], - "R60845244": [5776394, 5776395], - "R15017962": [], - "temps_de_travail_souhaite": "TC", - "duree_du_contrat_de_stage_apprentissage": "2h", - "ecole_centre_de_formation_mission_loc": "Ecole de la vie", - "intitule_diplome_vise": 
"BE", - "specialite_diplome": "Curling", - "R1249737": 1124022, - "dernier_diplome_obtenu": "BAC", - "derniere_classe_suivie": "Terminale", + 'type_de_candidature': 'E', + 'annonce': 524522, + 'R60284409': 170013, + 'firstName': 'John', + 'lastName': 'Doe', + 'sexe': 'H', + 'date_de_naissance': '1985-03-06', + 'R1249730': 93421, + 'date_fin_autorisation_de_travail': '2023-05-09', + 'rqth': 'N', + 'date_fin_rqth': '2023-05-08', + 'permis_de_conduire': 'A,B', + 'fimo': 'O', + 'Date_delivrance_fimo': '2023-05-07', + 'date_fin_validite_fimo': '2023-05-08', + 'R1258320': 1258319, + 'collectivite_agent': 'Mairie de Toulouse', + 'date_debut_disponibilite': '2023-05-02', + 'date_fin_disponibilite': '2023-05-01', + 'pretentions_salariales': '1000', + 'adresse': '12 Sesame Street', + 'code_postal': '77710', + 'ville': 'Nemours', + 'telephone': '+33 636656565', + 'email': 'csechet@entrouvert.com', + 'date_debut_contrat': '2023-05-06', + 'date_fin_contrat': '2023-05-04', + 'complement_information_candidature': 'I need money.', + 'R1261279': 1561049, + 'accord_RGPD': True, + 'R1249707': [157193, 157194], + 'R60845221': [5776395, 5776396], + 'R60845244': [5776394, 5776395], + 'R15017962': [], + 'temps_de_travail_souhaite': 'TC', + 'duree_du_contrat_de_stage_apprentissage': '2h', + 'ecole_centre_de_formation_mission_loc': 'Ecole de la vie', + 'intitule_diplome_vise': 'BE', + 'specialite_diplome': 'Curling', + 'R1249737': 1124022, + 'dernier_diplome_obtenu': 'BAC', + 'derniere_classe_suivie': 'Terminale', } - return httmock.response(200, json.dumps({"code": 200, "results": [{"id": 42}]})) + return httmock.response(200, json.dumps({'code': 200, 'results': [{'id': 42}]})) @httmock.urlmatch() def error_handler(url, request): @@ -466,53 +466,53 @@ class TestEndpoints: response = app.post_json( '/toulouse-foederis/foederis/create-application', params={ - "additional_informations": "I need money.", - "address": "12 Sesame Street", - "address_complement": "", - "agent_collectivity": "Mairie de Toulouse", - "aimed_diploma_level": "1124022", - "announce_id": "0524522", - "availability_end_date": "2023-05-01", - "availability_start_date": "2023-05-02", - "birth_date": "1985-03-06", - "city": "Nemours", - "civility": "170013", - "contract_end_date": "2023-05-04", - "contract_start_date": "2023-05-06", - "current_situation": "1258319", - "desired_work_time": "TC", - "diploma_name": "BE", - "diploma_speciality": "Curling", - "driving_license": "A,B", - "email": "csechet@entrouvert.com", - "fimo": "o", - "fimo_delivrance_date": "2023-05-07", - "fimo_end_validity_date": "2023-05-08", - "first_name": "John", - "gender": "H", - "internship_duration": "2h", - "job_families": ["5776394", "5776395"], - "job_realms": ["5776395", "5776396"], - "job_types": ["157193", "157194"], - "last_course_taken": "Terminale", - "last_name": "Doe", - "last_obtained_diploma": "BAC", - "nationality": "93421", - "origin": "1561049", - "origin_precisions": "", - "phone": "+33 636656565", - "rgpd_agreement": "tRuE", - "rqth": False, - "rqth_end_date": "2023-05-08", - "salary_expectations": "1000", - "school_name": "Ecole de la vie", - "type": "E", - "work_authorization_end_date": "2023-05-09", - "zip": "77710", + 'additional_informations': 'I need money.', + 'address': '12 Sesame Street', + 'address_complement': '', + 'agent_collectivity': 'Mairie de Toulouse', + 'aimed_diploma_level': '1124022', + 'announce_id': '0524522', + 'availability_end_date': '2023-05-01', + 'availability_start_date': '2023-05-02', + 'birth_date': '1985-03-06', + 'city': 
'Nemours', + 'civility': '170013', + 'contract_end_date': '2023-05-04', + 'contract_start_date': '2023-05-06', + 'current_situation': '1258319', + 'desired_work_time': 'TC', + 'diploma_name': 'BE', + 'diploma_speciality': 'Curling', + 'driving_license': 'A,B', + 'email': 'csechet@entrouvert.com', + 'fimo': 'o', + 'fimo_delivrance_date': '2023-05-07', + 'fimo_end_validity_date': '2023-05-08', + 'first_name': 'John', + 'gender': 'H', + 'internship_duration': '2h', + 'job_families': ['5776394', '5776395'], + 'job_realms': ['5776395', '5776396'], + 'job_types': ['157193', '157194'], + 'last_course_taken': 'Terminale', + 'last_name': 'Doe', + 'last_obtained_diploma': 'BAC', + 'nationality': '93421', + 'origin': '1561049', + 'origin_precisions': '', + 'phone': '+33 636656565', + 'rgpd_agreement': 'tRuE', + 'rqth': False, + 'rqth_end_date': '2023-05-08', + 'salary_expectations': '1000', + 'school_name': 'Ecole de la vie', + 'type': 'E', + 'work_authorization_end_date': '2023-05-09', + 'zip': '77710', }, ) - assert response.json["data"]["application_id"] == 42 + assert response.json['data']['application_id'] == 42 def test_attach_file(self, resource, app): @httmock.urlmatch(path=r'^.*/data/candidature/424242/fields/cv$') @@ -521,12 +521,12 @@ class TestEndpoints: assert request.headers['api-key'] == APIKEY payload = json.loads(request.body) assert payload == { - "contentType": "application/pdf", - "value": "base 64 content", - "fileName": "cv.pdf", + 'contentType': 'application/pdf', + 'value': 'base 64 content', + 'fileName': 'cv.pdf', } - return httmock.response(200, json.dumps({"code": 200, "results": ['Field updated']})) + return httmock.response(200, json.dumps({'code': 200, 'results': ['Field updated']})) @httmock.urlmatch() def error_handler(url, request): @@ -536,17 +536,17 @@ class TestEndpoints: response = app.post_json( '/toulouse-foederis/foederis/attach-file', params={ - "application_id": "424242", - "name": "cv", - "file": { - "content_type": "application/pdf", - "content": "base 64 content", - "filename": "cv.pdf", + 'application_id': '424242', + 'name': 'cv', + 'file': { + 'content_type': 'application/pdf', + 'content': 'base 64 content', + 'filename': 'cv.pdf', }, }, ) - assert response.json["err"] == 0 + assert response.json['err'] == 0 def test_migration_0003_no_null_no_charfield(migration): diff --git a/tests/test_toulouse_maelis.py b/tests/test_toulouse_maelis.py index e58bbb6f..66b6de63 100644 --- a/tests/test_toulouse_maelis.py +++ b/tests/test_toulouse_maelis.py @@ -51,7 +51,7 @@ def get_xml_file(filename): def get_media_file(filename): - with open(os.path.join(TEST_BASE_DIR, "%s" % filename), 'rb') as desc: + with open(os.path.join(TEST_BASE_DIR, '%s' % filename), 'rb') as desc: return desc.read() @@ -1417,7 +1417,7 @@ def test_search_family_dui(family_service, con, app): @pytest.mark.parametrize( - "xml", ['R_read_family.xml', 'R_read_family_relax.xml', 'R_read_family_reordered.xml'] + 'xml', ['R_read_family.xml', 'R_read_family_relax.xml', 'R_read_family_reordered.xml'] ) def test_read_family(family_service, xml, con, app): family_service.add_soap_response('readFamily', get_xml_file(xml)) @@ -1701,10 +1701,10 @@ def test_read_family(family_service, xml, con, app): 'description': 'bla bla PAI', } assert resp.json['data']['childList'][0]['medicalRecord'] == { - "familyDoctor": { - "name": "DRE", - "phone": "0612341234", - "address": {"street1": "Alameda", "zipcode": "90220", "town": "Compton"}, + 'familyDoctor': { + 'name': 'DRE', + 'phone': '0612341234', + 'address': 
{'street1': 'Alameda', 'zipcode': '90220', 'town': 'Compton'}, }, 'allergy1': 'butterscotch, imitation butterscotch, glow-in-the-dark monster make-up', 'allergy2': 'shrimp and cauliflower', @@ -1714,21 +1714,21 @@ def test_read_family(family_service, xml, con, app): 'observ2': 'Eat my shorts!', 'isAuthHospital': True, 'hospital': 'Springfield General Hospital', - "vaccinList": [ + 'vaccinList': [ { - "code": "ROR", - "libelle": "ROUGEOLE-OREILLONS-RUBEOLE", - "vaccinationDate": "2012-02-22T00:00:00+01:00", + 'code': 'ROR', + 'libelle': 'ROUGEOLE-OREILLONS-RUBEOLE', + 'vaccinationDate': '2012-02-22T00:00:00+01:00', }, { - "code": "DTC", - "libelle": "DIPHTERIE TETANOS COQUELUCHE", - "vaccinationDate": "2011-01-11T00:00:00+01:00", + 'code': 'DTC', + 'libelle': 'DIPHTERIE TETANOS COQUELUCHE', + 'vaccinationDate': '2011-01-11T00:00:00+01:00', }, { - "code": "ROR", - "libelle": "ROUGEOLE-OREILLONS-RUBEOLE", - "vaccinationDate": "1970-01-11T00:00:00+01:00", + 'code': 'ROR', + 'libelle': 'ROUGEOLE-OREILLONS-RUBEOLE', + 'vaccinationDate': '1970-01-11T00:00:00+01:00', }, ], } @@ -1813,7 +1813,7 @@ def test_read_family(family_service, xml, con, app): def test_read_family_zipcode(family_service, con, app): - family_service.add_soap_response('readFamily', get_xml_file("R_read_family.xml")) + family_service.add_soap_response('readFamily', get_xml_file('R_read_family.xml')) url = get_endpoint('read-family') ban = BaseAdresse.objects.create(slug='ban') @@ -1860,7 +1860,7 @@ def test_read_family_soap_error(family_service, con, app): def test_read_family_without_birth(family_service, con, app): - family_service.add_soap_response('readFamily', get_xml_file("R_read_family_no_birth.xml")) + family_service.add_soap_response('readFamily', get_xml_file('R_read_family_no_birth.xml')) url = get_endpoint('read-family') resp = app.get(url + '?family_id=1312') @@ -2606,7 +2606,7 @@ def test_update_family_maelis_error(family_service, con, app): Link.objects.create(resource=con, family_id='1312', name_id='local') resp = app.post_json(url + '?NameID=local', params=params) assert resp.json['err'] == 1 - assert "E65a : Il existe déjà un enfant" in resp.json['err_desc'] + assert 'E65a : Il existe déjà un enfant' in resp.json['err_desc'] def test_update_family_soap_error(family_service, con, app): @@ -9067,7 +9067,7 @@ def test_update_activity_agenda(activity_service, con, app): 'err': 0, } - with open(os.path.join(TEST_BASE_DIR, "update_activity_agenda_template.html")) as fd: + with open(os.path.join(TEST_BASE_DIR, 'update_activity_agenda_template.html')) as fd: template = fd.read() message = render_to_string(template, {'form_workflow_data_ws_response': resp.json}) assert [l.strip() for l in message.split('\n') if l.strip()] == [ @@ -9152,7 +9152,7 @@ def test_update_activity_agenda_multi_units(activity_service, con, app): 'err': 0, } - with open(os.path.join(TEST_BASE_DIR, "update_activity_agenda_template.html")) as fd: + with open(os.path.join(TEST_BASE_DIR, 'update_activity_agenda_template.html')) as fd: template = fd.read() message = render_to_string(template, {'form_workflow_data_ws_response': resp.json}) assert [x.strip() for x in message.split('\n') if x.strip()] == [ @@ -11054,7 +11054,7 @@ def test_trigger_wcs_on_cancelled_subscriptions_cron( ) trigger_body = json.loads(wcs_service.calls[-1].request.body) assert trigger_body['err'] == 1 - assert trigger_body['err_desc'] == "La facture a été annulée" + assert trigger_body['err_desc'] == 'La facture a été annulée' assert trigger_body['data']['subscription_status'] 
== 'cancelled' assert trigger_body['data']['regie_text'] == 'DSBL' assert any(['trigger wcs' in x.message for x in caplog.records]) @@ -11140,7 +11140,7 @@ def test_trigger_wcs_on_cancelled_subscriptions_job( ) trigger_body = json.loads(wcs_service.calls[-1].request.body) assert trigger_body['err'] == 1 - assert trigger_body['err_desc'] == "La facture a été annulée" + assert trigger_body['err_desc'] == 'La facture a été annulée' assert trigger_body['data']['subscription_status'] == 'cancelled' assert trigger_body['data']['regie_text'] == 'DSBL' assert any(['trigger wcs' in x.message for x in caplog.records]) diff --git a/tests/test_toulouse_smart.py b/tests/test_toulouse_smart.py index 9fd93964..38b9759c 100644 --- a/tests/test_toulouse_smart.py +++ b/tests/test_toulouse_smart.py @@ -84,10 +84,10 @@ def mock_response(*path_contents): @httmock.urlmatch(path=path) def handler(url, request): if payload: - ctype, pdict = cgi.parse_header(request.headers["content-type"]) + ctype, pdict = cgi.parse_header(request.headers['content-type']) if ctype == 'multipart/form-data': # here payload is an expected multipart contents list - pdict["boundary"] = bytes(pdict["boundary"], "utf-8") + pdict['boundary'] = bytes(pdict['boundary'], 'utf-8') pdict['CONTENT-LENGTH'] = request.headers['Content-Length'] postvars = cgi.parse_multipart(io.BytesIO(request.body), pdict) for i, media_content in enumerate(postvars['media']): @@ -120,12 +120,12 @@ def mock_response(*path_contents): def get_json_file(filename): - with open(os.path.join(TEST_BASE_DIR, "%s.json" % filename)) as desc: + with open(os.path.join(TEST_BASE_DIR, '%s.json' % filename)) as desc: return desc.read() def get_media_file(filename): - with open(os.path.join(TEST_BASE_DIR, "%s" % filename), 'rb') as desc: + with open(os.path.join(TEST_BASE_DIR, '%s' % filename), 'rb') as desc: return desc.read() @@ -404,7 +404,7 @@ CREATE_INTERVENTION_QUERY_WITHOUT_PROPERTIES['interventionData'] = {} ['/v1/type-intervention', None, INTERVENTION_TYPES], ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')], ) -@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID) +@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID) def test_create_intervention(mocked_uuid4, app, smart): with pytest.raises(WcsRequest.DoesNotExist): smart.wcs_requests.get(uuid=UUID) @@ -433,7 +433,7 @@ def test_create_intervention(mocked_uuid4, app, smart): ['/v1/type-intervention', None, INTERVENTION_TYPES], ['/v1/intervention', CREATE_INTERVENTION_QUERY_WITHOUT_PROPERTIES, get_json_file('create_intervention')], ) -@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID) +@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID) def test_create_intervention_without_properties(mocked_uuid4, app, smart): payload = deepcopy(CREATE_INTERVENTION_PAYLOAD) payload['extra']['slug'] = 'empty' @@ -446,7 +446,7 @@ def test_create_intervention_without_properties(mocked_uuid4, app, smart): ['/v1/type-intervention', None, INTERVENTION_TYPES], ['/v1/intervention', CREATE_INTERVENTION_QUERY_WITHOUT_PROPERTIES, get_json_file('create_intervention')], ) -@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID) +@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID) def test_create_intervention_providing_empty_block(mocked_uuid4, app, smart): payload = deepcopy(CREATE_INTERVENTION_PAYLOAD) payload['extra']['slug'] = 'empty' @@ -547,7 +547,7 
@@ def test_create_intervention_missing_field(app, smart):
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, None, 500],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_twice(mocked_uuid4, app, smart):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -565,7 +565,7 @@ def test_create_intervention_twice(mocked_uuid4, app, smart):
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, None, 500],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_transport_error(mocked_uuid, app, freezer, smart):
     freezer.move_to('2021-07-08 00:00:00')
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
@@ -590,7 +590,7 @@ def test_create_intervention_transport_error(mocked_uuid, app, freezer, smart):
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, None, None, ReadTimeout('timeout')],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_timeout_error(mocked_uuid, app, smart):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -606,7 +606,7 @@ def test_create_intervention_timeout_error(mocked_uuid, app, smart):
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, None, 500],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_inconsistency_id_error(mocked_uuid4, app, freezer, smart):
     freezer.move_to('2021-07-08 00:00:00')
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
@@ -627,7 +627,7 @@ def test_create_intervention_inconsistency_id_error(mocked_uuid4, app, freezer,
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, 'not json content'],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_content_error(mocked_uuid, app, freezer, smart):
     freezer.move_to('2021-07-08 00:00:00')
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
@@ -641,7 +641,7 @@ def test_create_intervention_content_error(mocked_uuid, app, freezer, smart):
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, '400 Client Error', 400],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_client_error(mocked_uuid, app, freezer, smart):
     freezer.move_to('2021-07-08 00:00:00')
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
@@ -703,7 +703,7 @@ def test_create_intervention_client_error(mocked_uuid, app, freezer, smart):
 @mock.patch('passerelle.utils.RequestSession.request')
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_timeout(mocked_uuid, mocked_get, app, freezer, smart):
     from tests.utils import FakedResponse
@@ -754,7 +754,7 @@ WCS_RESPONSE_ERROR = '{"err": 1, "err_class": "Access denied", "err_desc": null}
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')],
     ['/api/forms/foo/2/hooks/update_intervention/', UPDATE_INTERVENTION_QUERY, WCS_RESPONSE_SUCCESS],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_update_intervention(mocked_uuid, app, smart, wcs_service):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -764,7 +764,7 @@ def test_update_intervention(mocked_uuid, app, smart, wcs_service):
     smart.wcs_requests.get(uuid=UUID)
     mocked_push = mock.patch(
-        "passerelle.contrib.toulouse_smart.models.SmartRequest.push",
+        'passerelle.contrib.toulouse_smart.models.SmartRequest.push',
         return_value=False,
     )
     mocked_push.start()
@@ -809,7 +809,7 @@ def test_update_intervention_wrong_uuid(app, smart):
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_update_intervention_job_wrong_service(mocked_uuid, app, smart, wcs_service):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -830,7 +830,7 @@ def test_update_intervention_job_wrong_service(mocked_uuid, app, smart, wcs_serv
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')],
     ['/api/forms/foo/2/hooks/update_intervention/', UPDATE_INTERVENTION_QUERY, WCS_RESPONSE_ERROR, 403],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_update_intervention_job_wcs_error(mocked_uuid, app, smart, wcs_service, caplog):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -850,7 +850,7 @@ def test_update_intervention_job_wcs_error(mocked_uuid, app, smart, wcs_service,
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')],
     ['/api/forms/foo/2/hooks/update_intervention/', UPDATE_INTERVENTION_QUERY, 'bla', 500],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_update_intervention_job_wcs_error_not_json(mocked_uuid, app, freezer, smart, wcs_service):
     freezer.move_to('2021-07-08 00:00:00')
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
@@ -884,7 +884,7 @@ def test_update_intervention_job_wcs_error_not_json(mocked_uuid, app, freezer, s
         ConnectionError('No address associated with hostname'),
     ],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_update_intervention_job_transport_error(mocked_uuid, app, freezer, smart):
     freezer.move_to('2021-07-08 00:00:00')
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
@@ -924,7 +924,7 @@ ADD_MEDIA_QUERY = [get_media_file('201x201.jpg')]
     ['/v1/intervention/%s/media' % INTERVENTION_ID, ADD_MEDIA_QUERY, 200],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_add_media(mocked_uuid, app, smart):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -947,7 +947,7 @@ def test_add_media(mocked_uuid, app, smart):
     # smart not responding
     mocked_push = mock.patch(
-        "passerelle.contrib.toulouse_smart.models.WcsRequestFile.push",
+        'passerelle.contrib.toulouse_smart.models.WcsRequestFile.push',
         return_value=False,
     )
     mocked_push.start()
@@ -981,7 +981,7 @@ def test_add_media_wrong_uuid(app, smart):
     ['/v1/intervention/%s/media' % json.loads(get_json_file('create_intervention'))['id'], None, None, 500],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_add_media_error(mocked_uuid, app, freezer, smart):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -1004,7 +1004,7 @@ def test_add_media_error(mocked_uuid, app, freezer, smart):
     ['/v1/intervention/%s/media' % INTERVENTION_ID, None, None, None, ReadTimeout('timeout')],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_add_media_timeout_error(mocked_uuid, app, freezer, smart):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -1027,7 +1027,7 @@ def test_add_media_timeout_error(mocked_uuid, app, freezer, smart):
     ['/v1/intervention/%s/media' % INTERVENTION_ID, ADD_MEDIA_QUERY, 200],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, '400 Client Error', 400],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_add_media_with_create_intervention_failure(mocked_uuid, app, smart):
     resp = app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert not resp.json['err']
@@ -1083,14 +1083,14 @@ UPDATE_INTERVENTION_QUERY_ON_ASYNC_CREATION['creation_response']['result'][
         WCS_RESPONSE_SUCCESS,
     ],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_async(mocked_uuid4, app, smart, wcs_service):
     mocked_wcs_request_push = mock.patch(
-        "passerelle.contrib.toulouse_smart.models.WcsRequest.push",
+        'passerelle.contrib.toulouse_smart.models.WcsRequest.push',
         return_value=False,
     )
     mocked_smart_request_push = mock.patch(
-        "passerelle.contrib.toulouse_smart.models.SmartRequest.push",
+        'passerelle.contrib.toulouse_smart.models.SmartRequest.push',
         return_value=False,
     )
@@ -1145,10 +1145,10 @@ def test_create_intervention_async(mocked_uuid4, app, smart, wcs_service):
     ['/v1/intervention/%s/media' % INTERVENTION_ID, ADD_MEDIA_QUERY, 200],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, get_json_file('create_intervention')],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_add_media_async(mocked_uuid4, app, smart, freezer):
     mocked_wcs_request_push = mock.patch(
-        "passerelle.contrib.toulouse_smart.models.WcsRequest.push",
+        'passerelle.contrib.toulouse_smart.models.WcsRequest.push',
         return_value=False,
     )
@@ -1186,7 +1186,7 @@ def test_add_media_async(mocked_uuid4, app, smart, freezer):
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', None, get_json_file('create_intervention')],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_multiple_step(mocked_uuid4, app, smart):
     app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     assert smart.wcs_requests.get(uuid=UUID, wcs_form_step='initial')
@@ -1252,7 +1252,7 @@ CREATE_INTERNVENTION_WITH_NONE = json.dumps(
     ['/v1/type-intervention', None, INTERVENTION_TYPES],
     ['/v1/intervention', CREATE_INTERVENTION_QUERY, CREATE_INTERNVENTION_WITH_NONE],
 )
-@mock.patch("django.db.models.fields.UUIDField.get_default", return_value=UUID)
+@mock.patch('django.db.models.fields.UUIDField.get_default', return_value=UUID)
 def test_create_intervention_none_dates(mocked_uuid4, app, smart):
     app.post_json(URL + 'create-intervention/', params=CREATE_INTERVENTION_PAYLOAD)
     wcs_request = smart.wcs_requests.get(uuid=UUID)
diff --git a/tests/test_utils_soap.py b/tests/test_utils_soap.py
index fc6a04b3..69b7d108 100644
--- a/tests/test_utils_soap.py
+++ b/tests/test_utils_soap.py
@@ -79,7 +79,7 @@ def test_disable_strict_mode(mocked_post):
     soap_resource = SOAPResource()
     client = SOAPClient(soap_resource)
-    match = "Unexpected element %s, expected %s" % (repr('price'), repr('skipMe'))
+    match = 'Unexpected element %s, expected %s' % (repr('price'), repr('skipMe'))
     with pytest.raises(XMLParseError, match=match):
         client.service.GetLastTradePrice(tickerSymbol='banana')
diff --git a/tests/test_views.py b/tests/test_views.py
index b1d77140..465fa69c 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -4,6 +4,6 @@ from tests.test_manager import login
 def test_get_does_not_crash_on_unknown_connector(app, db, admin_user):
     app = login(app)
-    resp = app.get('/manage/opengis/noop/logs/', status="*")
+    resp = app.get('/manage/opengis/noop/logs/', status='*')
     assert resp.status_code == 404
diff --git a/tests/test_vivaticket.py b/tests/test_vivaticket.py
index 1091984a..4742c0de 100644
--- a/tests/test_vivaticket.py
+++ b/tests/test_vivaticket.py
@@ -231,7 +231,7 @@ def test_get_api_key(mocked_post, app, connector):
     mocked_post.return_value = tests.utils.FakedResponse(content=KEY_RESPONSE, ok=True)
     connector.get_apikey()
     assert mocked_post.call_count == 2
-    assert "Connect/PostConnect" in mocked_post.call_args[0][0]
+    assert 'Connect/PostConnect' in mocked_post.call_args[0][0]
     assert mocked_post.call_args[1]['json']['Login'] == 'foo'
     assert mocked_post.call_args[1]['json']['Password'] == 'bar'
     # make sure the key from cache is used
@@ -248,7 +248,7 @@ def test_get_events(mocked_get, mocked_post, app, connector):
     mocked_post.return_value = tests.utils.FakedResponse(content=KEY_RESPONSE, status_code=200)
     result = tests.utils.endpoint_get('/vivaticket/test/events', app, connector, 'events')
     assert mocked_post.call_count == 1
-    assert mocked_get.call_args[1]['params']['key'] == "86569D0CA1B1CBEF8D77DD5BDC9F5CBAE5C99074"
+    assert mocked_get.call_args[1]['params']['key'] == '86569D0CA1B1CBEF8D77DD5BDC9F5CBAE5C99074'
     assert 'data' in result.json
     for item in result.json['data']:
         assert 'id' in item
@@ -358,7 +358,7 @@ def test_book(mocked_get, mocked_put, mocked_post, app, connector):
     payload['quantity'] = '01'
     payload['booking_comment'] = 'Booking comment'
     payload['room_comment'] = 'Room comment'
-    payload['form_url'] = "http://mysite.com/form/id/"
+    payload['form_url'] = 'http://mysite.com/form/id/'
     response = app.post_json(url, params=payload, status=400)
     assert "does not match '^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}$" in response.json['err_desc']
     payload['start_datetime'] = '2019-01-15T10:00'