Compare commits
1 Commits
main
...
wip/parsif
Author | SHA1 | Date |
---|---|---|
Nicolas Roche | 331ec6ab11 |
|
@ -8,5 +8,3 @@ d2c0be039649febded68d9d04f745cd18b2b2e03
|
|||
989fb5271967e8e87fd57837dd6d8cfe932e7ebe
|
||||
# misc: apply djhtml (#69422)
|
||||
6da81964bd91b5656364357ec06776fed3529c8a
|
||||
# misc: apply double-quote-string-fixer (#79788)
|
||||
40142de8d2d9885f7a57f4b0f5ab1a593e13aaca
|
||||
|
|
|
@ -12,7 +12,5 @@ passerelle.egg-info/
|
|||
coverage.xml
|
||||
junit-py*.xml
|
||||
.sass-cache/
|
||||
passerelle/**/static/**/css/style.css
|
||||
passerelle/**/static/**/css/style.css.map
|
||||
node_modules/
|
||||
coverage/
|
||||
passerelle/static/css/style.css
|
||||
passerelle/static/css/style.css.map
|
||||
|
|
|
@ -1,10 +1,6 @@
|
|||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
hooks:
|
||||
- id: double-quote-string-fixer
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.3.1
|
||||
hooks:
|
||||
|
@ -31,6 +27,6 @@ repos:
|
|||
- id: djhtml
|
||||
args: ['--tabwidth', '2']
|
||||
- repo: https://git.entrouvert.org/pre-commit-debian.git
|
||||
rev: v0.3
|
||||
rev: v0.1
|
||||
hooks:
|
||||
- id: pre-commit-debian
|
||||
|
|
|
@ -11,34 +11,19 @@ pipeline {
|
|||
RAND_TEST = "${Math.abs(new Random().nextInt(max+1))}"
|
||||
}
|
||||
stages {
|
||||
stage('Tests (in parallel)') {
|
||||
failFast true
|
||||
parallel {
|
||||
stage('Unit Tests (pytest)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv"
|
||||
}
|
||||
post {
|
||||
always {
|
||||
script {
|
||||
utils = new Utils()
|
||||
utils.publish_coverage('coverage.xml')
|
||||
utils.publish_coverage_native('index.html')
|
||||
utils.publish_pylint('pylint.out')
|
||||
}
|
||||
mergeJunitResults()
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Unit Tests (vitest)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv -e vitest"
|
||||
}
|
||||
}
|
||||
stage('Linter (pylint)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv -e pylint"
|
||||
stage('Unit Tests') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=6 RAND_TEST=${env.RAND_TEST} tox -rv"
|
||||
}
|
||||
post {
|
||||
always {
|
||||
script {
|
||||
utils = new Utils()
|
||||
utils.publish_coverage('coverage.xml')
|
||||
utils.publish_coverage_native('index.html')
|
||||
utils.publish_pylint('pylint.out')
|
||||
}
|
||||
mergeJunitResults()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -54,9 +39,9 @@ pipeline {
|
|||
'''
|
||||
).trim()
|
||||
if (env.GIT_BRANCH == 'main' || env.GIT_BRANCH == 'origin/main') {
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye,bookworm ${SHORT_JOB_NAME}"
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye ${SHORT_JOB_NAME}"
|
||||
} else if (env.GIT_BRANCH.startsWith('hotfix/')) {
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye,bookworm --branch ${env.GIT_BRANCH} --hotfix ${SHORT_JOB_NAME}"
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye --branch ${env.GIT_BRANCH} --hotfix ${SHORT_JOB_NAME}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
15
README
15
README
|
@ -126,18 +126,3 @@ django-jsonresponse (https://github.com/jjay/django-jsonresponse)
|
|||
# Files: passerelle/utils/jsonresponse.py
|
||||
# Copyright (c) 2012 Yasha Borevich <j.borevich@gmail.com>
|
||||
# Licensed under the BSD license
|
||||
|
||||
tweetnacl-js (https://github.com/dchest/tweetnacl-js)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/js/nacl.min.js
|
||||
# Copyright: https://github.com/dchest/tweetnacl-js/blob/master/AUTHORS.md
|
||||
# Licensed under the Unlicense license (public domain)
|
||||
|
||||
zxing-browser (https://github.com/zxing-js/browser/)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/js/zxing-browser.min.js
|
||||
# Copyright: (c) 2018 ZXing for JS
|
||||
# Licensed under the MIT license.
|
||||
|
||||
RemixIcon (https://github.com/Remix-Design/RemixIcon)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/img/favicon.ico
|
||||
# Copyright (c) 2020 RemixIcon.com
|
||||
# Licensed under the Apache License Version 2.0
|
||||
|
|
|
@ -16,9 +16,7 @@ Architecture: all
|
|||
Depends: ghostscript,
|
||||
pdftk,
|
||||
poppler-utils,
|
||||
python3-caldav,
|
||||
python3-cmislib,
|
||||
python3-cryptography,
|
||||
python3-dateutil,
|
||||
python3-distutils,
|
||||
python3-django (>= 2:3.2),
|
||||
|
@ -45,7 +43,6 @@ Depends: ghostscript,
|
|||
python3-uwsgidecorators,
|
||||
python3-vobject,
|
||||
python3-xmlschema,
|
||||
python3-xmltodict,
|
||||
python3-zeep (>= 3.2),
|
||||
${misc:Depends},
|
||||
${python3:Depends},
|
||||
|
@ -63,9 +60,8 @@ Depends: adduser,
|
|||
uwsgi,
|
||||
uwsgi-plugin-python3,
|
||||
${misc:Depends},
|
||||
Recommends: memcached,
|
||||
nginx,
|
||||
Suggests: postgresql,
|
||||
Breaks: python-passerelle (<<5.75.post9),
|
||||
Replaces: python-passerelle (<<5.75.post9),
|
||||
Recommends: memcached, nginx
|
||||
Suggests: postgresql
|
||||
Breaks: python-passerelle (<<5.75.post9)
|
||||
Replaces: python-passerelle (<<5.75.post9)
|
||||
Description: Uniform access to multiple data sources and services
|
||||
|
|
|
@ -4,7 +4,6 @@ After=network.target postgresql.service
|
|||
Wants=postgresql.service
|
||||
|
||||
[Service]
|
||||
SyslogIdentifier=uwsgi/%p
|
||||
Environment=PASSERELLE_SETTINGS_FILE=/usr/lib/%p/debian_config.py
|
||||
Environment=PASSERELLE_WSGI_TIMEOUT=120
|
||||
Environment=PASSERELLE_WSGI_WORKERS=5
|
||||
|
|
|
@ -18,7 +18,6 @@ spooler-python-import = passerelle.utils.spooler
|
|||
spooler-max-tasks = 20
|
||||
|
||||
# every five minutes
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants every5min
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants availability
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants jobs
|
||||
# hourly
|
||||
|
|
|
@ -2,23 +2,23 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--url', help='Url of a passerelle Caluire Axel connector instance')
|
||||
parser.addoption('--nameid', help='Publik Name ID')
|
||||
parser.addoption('--firstname', help='first name of a user')
|
||||
parser.addoption('--lastname', help='Last name of a user')
|
||||
parser.addoption('--family', help='Family ID')
|
||||
parser.addoption("--url", help="Url of a passerelle Caluire Axel connector instance")
|
||||
parser.addoption("--nameid", help="Publik Name ID")
|
||||
parser.addoption("--firstname", help="first name of a user")
|
||||
parser.addoption("--lastname", help="Last name of a user")
|
||||
parser.addoption("--family", help="Family ID")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption('--url')
|
||||
return request.config.getoption("--url")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def user(request):
|
||||
return {
|
||||
'name_id': request.config.getoption('--nameid'),
|
||||
'first_name': request.config.getoption('--firstname'),
|
||||
'last_name': request.config.getoption('--lastname'),
|
||||
'family': request.config.getoption('--family'),
|
||||
'name_id': request.config.getoption("--nameid"),
|
||||
'first_name': request.config.getoption("--firstname"),
|
||||
'last_name': request.config.getoption("--lastname"),
|
||||
'family': request.config.getoption("--family"),
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ def test_link(conn, user):
|
|||
'NOM': user['last_name'],
|
||||
'PRENOM': user['first_name'],
|
||||
}
|
||||
print('Creating link with the following payload:')
|
||||
print("Creating link with the following payload:")
|
||||
pprint.pprint(payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -21,7 +21,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('GET family info')
|
||||
print("GET family info")
|
||||
url = conn + '/family_info?NameID=%s' % name_id
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -30,7 +30,7 @@ def test_link(conn, user):
|
|||
assert data['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('GET children info')
|
||||
print("GET children info")
|
||||
url = conn + '/children_info?NameID=%s' % (name_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -40,7 +40,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
|
||||
for child in data['data']['MEMBRE']:
|
||||
print('GET child info')
|
||||
print("GET child info")
|
||||
url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDENT'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -49,7 +49,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('and GET school info')
|
||||
print("and GET school info")
|
||||
url = conn + '/child_schooling_info?NameID=%s&idpersonne=%s&schooling_date=%s' % (
|
||||
name_id,
|
||||
child['IDENT'],
|
||||
|
@ -62,7 +62,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('and GET activities info')
|
||||
print("and GET activities info")
|
||||
url = conn + '/child_activities_info?NameID=%s&idpersonne=%s&schooling_date=%s' % (
|
||||
name_id,
|
||||
child['IDENT'],
|
||||
|
@ -75,7 +75,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('GET school list')
|
||||
print("GET school list")
|
||||
url = conn + '/school_list'
|
||||
payload = {
|
||||
'num': data['data']['RESPONSABLE1']['ADRESSE']['NORUE'],
|
||||
|
@ -92,7 +92,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
return
|
||||
|
||||
print('Deleting link')
|
||||
print("Deleting link")
|
||||
url = conn + '/unlink?NameID=%s' % name_id
|
||||
resp = requests.post(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -5,25 +5,25 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--cmis-connector-url', help='Url of a passerelle CMIS connector instance')
|
||||
parser.addoption('--cmis-endpoint', help='Url of a passerelle CMIS endpoint')
|
||||
parser.addoption('--cmis-username', help='Username for the CMIS endpoint')
|
||||
parser.addoption('--cmis-password', help='Password for the CMIS endpoint')
|
||||
parser.addoption('--preserve-tree', action='store_true', default=False, help='Preserve test directory')
|
||||
parser.addoption("--cmis-connector-url", help="Url of a passerelle CMIS connector instance")
|
||||
parser.addoption("--cmis-endpoint", help="Url of a passerelle CMIS endpoint")
|
||||
parser.addoption("--cmis-username", help="Username for the CMIS endpoint")
|
||||
parser.addoption("--cmis-password", help="Password for the CMIS endpoint")
|
||||
parser.addoption("--preserve-tree", action="store_true", default=False, help="Preserve test directory")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def cmisclient(request):
|
||||
return cmislib.CmisClient(
|
||||
request.config.getoption('--cmis-endpoint'),
|
||||
request.config.getoption('--cmis-username'),
|
||||
request.config.getoption('--cmis-password'),
|
||||
request.config.getoption("--cmis-endpoint"),
|
||||
request.config.getoption("--cmis-username"),
|
||||
request.config.getoption("--cmis-password"),
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def cmis_connector(request):
|
||||
return request.config.getoption('--cmis-connector-url')
|
||||
return request.config.getoption("--cmis-connector-url")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
|
@ -31,6 +31,6 @@ def cmis_tmpdir(cmisclient, request):
|
|||
path = 'test-%s' % random.randint(0, 10000)
|
||||
folder = cmisclient.defaultRepository.rootFolder.createFolder(path)
|
||||
yield folder.properties['cmis:path']
|
||||
preserve_tree = request.config.getoption('--preserve-tree')
|
||||
preserve_tree = request.config.getoption("--preserve-tree")
|
||||
if not preserve_tree:
|
||||
folder.deleteTree()
|
||||
|
|
|
@ -10,7 +10,7 @@ SPECIAL_CHARS = '!#$%&+-^_`;[]{}+='
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'path,file_name',
|
||||
"path,file_name",
|
||||
[
|
||||
('', 'some.file'),
|
||||
('/toto', 'some.file'),
|
||||
|
@ -31,8 +31,8 @@ def test_uploadfile(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, monkeypatch
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
'path': cmis_tmpdir + path,
|
||||
'file': {'content': file_b64_content, 'filename': file_name, 'content_type': 'image/jpeg'},
|
||||
"path": cmis_tmpdir + path,
|
||||
"file": {"content": file_b64_content, "filename": file_name, "content_type": "image/jpeg"},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -59,8 +59,8 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
'path': cmis_tmpdir + '/uploadconflict',
|
||||
'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'},
|
||||
"path": cmis_tmpdir + '/uploadconflict',
|
||||
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -70,11 +70,11 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
'path': cmis_tmpdir + '/uploadconflict',
|
||||
'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'},
|
||||
"path": cmis_tmpdir + '/uploadconflict',
|
||||
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
resp_data = response.json()
|
||||
assert resp_data['err'] == 1
|
||||
assert resp_data['err_desc'].startswith('update conflict')
|
||||
assert resp_data['err_desc'].startswith("update conflict")
|
||||
|
|
|
@ -2,9 +2,9 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--url', help='Url of a passerelle Planitech connector instance')
|
||||
parser.addoption("--url", help="Url of a passerelle Planitech connector instance")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption('--url')
|
||||
return request.config.getoption("--url")
|
||||
|
|
|
@ -113,7 +113,7 @@ def test_main(conn):
|
|||
|
||||
|
||||
def call_generic(conn, endpoint):
|
||||
print('%s \n' % endpoint)
|
||||
print("%s \n" % endpoint)
|
||||
url = conn + '/%s' % endpoint
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -2,25 +2,25 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--url', help='Url of a passerelle Toulouse Axel connector instance')
|
||||
parser.addoption('--nameid', help='Publik Name ID')
|
||||
parser.addoption('--firstname', help='first name of a user')
|
||||
parser.addoption('--lastname', help='Last name of a user')
|
||||
parser.addoption('--dob', help='Date of birth of a user')
|
||||
parser.addoption('--dui', help='DUI number')
|
||||
parser.addoption("--url", help="Url of a passerelle Toulouse Axel connector instance")
|
||||
parser.addoption("--nameid", help="Publik Name ID")
|
||||
parser.addoption("--firstname", help="first name of a user")
|
||||
parser.addoption("--lastname", help="Last name of a user")
|
||||
parser.addoption("--dob", help="Date of birth of a user")
|
||||
parser.addoption("--dui", help="DUI number")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption('--url')
|
||||
return request.config.getoption("--url")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def user(request):
|
||||
return {
|
||||
'name_id': request.config.getoption('--nameid'),
|
||||
'first_name': request.config.getoption('--firstname'),
|
||||
'last_name': request.config.getoption('--lastname'),
|
||||
'dob': request.config.getoption('--dob'),
|
||||
'dui': request.config.getoption('--dui'),
|
||||
'name_id': request.config.getoption("--nameid"),
|
||||
'first_name': request.config.getoption("--firstname"),
|
||||
'last_name': request.config.getoption("--lastname"),
|
||||
'dob': request.config.getoption("--dob"),
|
||||
'dui': request.config.getoption("--dui"),
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@ import requests
|
|||
|
||||
|
||||
def test_link(conn, user):
|
||||
print('Get update management dates')
|
||||
print("Get update management dates")
|
||||
url = conn + '/management_dates'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -21,7 +21,7 @@ def test_link(conn, user):
|
|||
'PRENOM': user['first_name'],
|
||||
'NAISSANCE': user['dob'],
|
||||
}
|
||||
print('Creating link with the following payload:')
|
||||
print("Creating link with the following payload:")
|
||||
pprint.pprint(payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -30,7 +30,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print('GET family info')
|
||||
print("GET family info")
|
||||
url = conn + '/family_info?NameID=%s' % name_id
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -158,7 +158,7 @@ def test_link(conn, user):
|
|||
for key in flags:
|
||||
payload[key] = True
|
||||
|
||||
print('Update family info with the following payload:')
|
||||
print("Update family info with the following payload:")
|
||||
pprint.pprint(payload)
|
||||
url = conn + '/update_family_info?NameID=%s' % name_id
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -168,7 +168,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print('GET children info')
|
||||
print("GET children info")
|
||||
url = conn + '/children_info?NameID=%s' % (name_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -178,7 +178,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
|
||||
for child in data['data']['ENFANT']:
|
||||
print('GET child info')
|
||||
print("GET child info")
|
||||
url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -187,7 +187,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print('GET child contact info')
|
||||
print("GET child contact info")
|
||||
url = conn + '/child_contacts_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -196,7 +196,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print('Deleting link')
|
||||
print("Deleting link")
|
||||
url = conn + '/unlink?NameID=%s' % name_id
|
||||
resp = requests.post(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -21,7 +21,7 @@ FAMILY_PAYLOAD = {
|
|||
'rl1': {
|
||||
'civility': 'MME',
|
||||
'firstname': 'Marge',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'maidenName': 'Bouvier',
|
||||
'quality': 'MERE',
|
||||
'birth': {
|
||||
|
@ -32,14 +32,14 @@ FAMILY_PAYLOAD = {
|
|||
'idStreet': '2317',
|
||||
'num': '4',
|
||||
'street1': 'requeried having idStreet provided',
|
||||
'town': 'Toulouse',
|
||||
'zipcode': '31400',
|
||||
'town': 'Springfield',
|
||||
'zipcode': '62701',
|
||||
},
|
||||
},
|
||||
'rl2': {
|
||||
'civility': 'MR',
|
||||
'firstname': 'Homer',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'quality': 'PERE',
|
||||
'birth': {
|
||||
'dateBirth': '1956-05-12',
|
||||
|
@ -96,7 +96,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'M',
|
||||
'firstname': 'Bart',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'birth': {
|
||||
'dateBirth': '2014-04-01',
|
||||
'place': 'Brive-la-Gaillarde',
|
||||
|
@ -133,11 +133,11 @@ FAMILY_PAYLOAD = {
|
|||
'hospital': 'Springfield General Hospital',
|
||||
'vaccinList': [
|
||||
{
|
||||
'code': '8',
|
||||
'code': '45',
|
||||
'vaccinationDate': '2011-01-11',
|
||||
},
|
||||
{
|
||||
'code': '1',
|
||||
'code': '24',
|
||||
'vaccinationDate': '2022-02-22',
|
||||
},
|
||||
],
|
||||
|
@ -158,7 +158,7 @@ FAMILY_PAYLOAD = {
|
|||
'personInfo': {
|
||||
'civility': 'MR',
|
||||
'firstname': 'Abraham Jebediah',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'dateBirth': '1927-05-24',
|
||||
'sexe': 'M',
|
||||
'contact': {
|
||||
|
@ -175,7 +175,7 @@ FAMILY_PAYLOAD = {
|
|||
'personInfo': {
|
||||
'civility': 'MME',
|
||||
'firstname': 'Mona Penelope',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'dateBirth': '1929-03-15',
|
||||
'sexe': 'F',
|
||||
'contact': {
|
||||
|
@ -193,7 +193,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'F',
|
||||
'firstname': 'Lisa',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'birth': {'dateBirth': '2016-05-09'},
|
||||
'dietcode': 'MENU_SV',
|
||||
'paiInfoBean': {
|
||||
|
@ -203,7 +203,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'F',
|
||||
'firstname': 'Maggie',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'birth': {'dateBirth': '2018-12-17'},
|
||||
'dietcode': 'MENU_PAI',
|
||||
'paiInfoBean': {
|
||||
|
@ -213,7 +213,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'M',
|
||||
'firstname': 'Hugo',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'birth': {'dateBirth': '2018-04-01'},
|
||||
'dietcode': 'MENU_AV',
|
||||
'paiInfoBean': {
|
||||
|
@ -261,10 +261,7 @@ def pytest_addoption(parser):
|
|||
parser.addoption('--nameid', help='Publik Name ID', default='functest')
|
||||
parser.addoption('--dui', help='DUI number', default='')
|
||||
parser.addoption(
|
||||
'--lastname', help='override lastname to create a new "update" family', default='Test_Simpson'
|
||||
)
|
||||
parser.addoption(
|
||||
'--quick', action='store_true', help='do not reload referentials to speed-up tests', default=False
|
||||
'--lastname', help='override lastname to create a new "update" family', default='Simpson'
|
||||
)
|
||||
|
||||
|
||||
|
@ -351,7 +348,6 @@ def remove_id_on_rlg(conn, rlg):
|
|||
rlg['indicatorList'].sort(key=lambda x: x['code'])
|
||||
rlg['quotientList'].sort(key=lambda x: (x['yearRev'], x['dateStart']))
|
||||
del rlg['indicators'] # order may change
|
||||
del rlg['quotients'] # order may change
|
||||
rlg['subscribeActivityList'] = [] # not managed by test yet
|
||||
del rlg['subscribe_natures'] # order may change
|
||||
|
||||
|
@ -406,10 +402,7 @@ def conn(request):
|
|||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def referentials(request, conn):
|
||||
quick = request.config.getoption('--quick')
|
||||
if quick:
|
||||
return
|
||||
def referentials(conn):
|
||||
url = urlparse.urlparse(conn)
|
||||
slug = url.path.split('/')[2]
|
||||
cmd = (
|
||||
|
@ -422,10 +415,10 @@ def referentials(request, conn):
|
|||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def create_data(request, conn, reference_year):
|
||||
def create_data(request, conn):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
unlink(conn, name_id)
|
||||
lastname = 'TEST_' + uuid4().hex[0:25]
|
||||
lastname = 'EO_' + uuid4().hex[0:27]
|
||||
|
||||
# create family
|
||||
create_family_payload = copy.deepcopy(FAMILY_PAYLOAD)
|
||||
|
@ -449,21 +442,6 @@ def create_data(request, conn, reference_year):
|
|||
resp.raise_for_status()
|
||||
create_result = resp.json()
|
||||
assert create_result['err'] == 0
|
||||
|
||||
# add requiered quotient for subscriptions
|
||||
data = read_family(conn, name_id)
|
||||
url = conn + '/update-quotient?NameID=%s&rl_id=%s' % (name_id, data['RL1']['num'])
|
||||
payload = {
|
||||
'yearRev': str(reference_year),
|
||||
'dateStart': '%s-09-01' % (reference_year),
|
||||
'dateEnd': '3000-08-31',
|
||||
'mtt': '5000.0',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
print('\ncreate DUI: %s' % str(create_result['data']['number']))
|
||||
data = diff_family(conn, name_id, 'test_create_family.json')
|
||||
|
||||
|
@ -480,58 +458,6 @@ def create_data(request, conn, reference_year):
|
|||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def create_data2(request, conn, reference_year):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
unlink(conn, name_id)
|
||||
lastname = 'TEST_' + uuid4().hex[0:25]
|
||||
|
||||
# create family that is not located into Toulouse
|
||||
create_family_payload = copy.deepcopy(FAMILY_PAYLOAD)
|
||||
create_family_payload['rl1']['lastname'] = lastname
|
||||
create_family_payload['rl1']['adresse'] = create_family_payload['rl2']['adresse']
|
||||
create_family_payload['rl2']['adresse'] = copy.deepcopy(FAMILY_PAYLOAD['rl1']['adresse'])
|
||||
for child in create_family_payload['childList']:
|
||||
child['lastname'] = lastname
|
||||
|
||||
url = conn + '/create-family?NameID=%s' % name_id
|
||||
resp = requests.post(url, json=create_family_payload)
|
||||
resp.raise_for_status()
|
||||
create_result = resp.json()
|
||||
assert create_result['err'] == 0
|
||||
|
||||
# add requiered quotient for subscriptions
|
||||
data = read_family(conn, name_id)
|
||||
url = conn + '/update-quotient?NameID=%s&rl_id=%s' % (name_id, data['RL1']['num'])
|
||||
payload = {
|
||||
'yearRev': str(reference_year),
|
||||
'dateStart': '2023-05-15',
|
||||
'dateEnd': '3000-12-31',
|
||||
'mtt': '5000.0',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
print('\ncreate DUI again: %s' % str(create_result['data']['number']))
|
||||
data = diff_family(conn, name_id, 'test_create_family_out_town.json')
|
||||
|
||||
return {
|
||||
'name_id': name_id, # linked
|
||||
'family_id': str(create_result['data']['number']),
|
||||
'family_payload': create_family_payload,
|
||||
'lastname': lastname,
|
||||
'rl1_num': data['RL1']['num'],
|
||||
'rl2_num': data['RL2']['num'],
|
||||
'bart_num': data['childList'][0]['num'],
|
||||
'lisa_num': data['childList'][1]['num'],
|
||||
'maggie_num': data['childList'][2]['num'],
|
||||
'hugo_num': data['childList'][3]['num'],
|
||||
'data': data,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def update_data(request, conn):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
|
@ -690,102 +616,22 @@ def get_subscription_info(nature, activity_text, unit_text, place_text, con, nam
|
|||
}
|
||||
|
||||
|
||||
def get_loisirs_subscribe_info(con, data, year):
|
||||
return get_subscription_info(
|
||||
'LOISIRS',
|
||||
# Sigec made this loisirs activity available for functests
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
|
||||
'MERCREDI - 15h30/17h - 8/15Ans',
|
||||
'ARGOULETS',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_loisirs_subscribe_info3(con, data, year):
|
||||
return get_subscription_info(
|
||||
'LOISIRS',
|
||||
# Sigec made this loisirs activity available for functests
|
||||
'Vitrail Fusing 1/2 Je Adultes',
|
||||
'Inscription annuelle',
|
||||
'Centre Culturel ALBAN MINVILLE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_extrasco_subscribe_info(con, data, year):
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL ELEMENTAIRE Maourine Juin',
|
||||
'PUBLIK ADL ELEMENTAIRE Maourine JUIN 22/23(NE PAS UTILISER)',
|
||||
'MAOURINE (la) ELEMENTAIRE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_extrasco_subscribe_info2(con, data, year):
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL MATERNELLE Lardenne Juin',
|
||||
'PUBLIK ADL MATER JOURNEE AVEC REPAS',
|
||||
'LARDENNE MATERNELLE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_loisirs_subscribe_info(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info2(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_loisirs_subscribe_info(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info3(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_loisirs_subscribe_info3(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_extrasco_subscribe_info(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info2(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_extrasco_subscribe_info2(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info3(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_extrasco_subscribe_info2(conn, create_data2, reference_year)
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL ELEMENTAIRE Maourine Avril 2023',
|
||||
'ADL ELEMENTAIRE Maourine Avril 2023',
|
||||
'MAOURINE (la) ELEMENTAIRE',
|
||||
conn,
|
||||
create_data['name_id'],
|
||||
create_data['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
|
@ -799,32 +645,11 @@ def perisco_subscribe_info(conn, create_data, reference_year):
|
|||
return get_subscription_info(
|
||||
None,
|
||||
# Sigec made this peri-sco activity available for functests
|
||||
'Temps du midi',
|
||||
'TEST TEMPS DU MIDI 22/23',
|
||||
'AMIDONNIERS ELEMENTAIRE',
|
||||
'TEMPS DU MIDI 22/23',
|
||||
'TEMPS DU MIDI 22/23',
|
||||
'DOLTO FRANCOISE MATERNELLE',
|
||||
conn,
|
||||
create_data['name_id'],
|
||||
create_data['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def perisco_subscribe_adulte_info(conn, create_data2, reference_year):
|
||||
'''This fixture is a configuration trick from Sigec
|
||||
as peri-sco should not be available for subscription
|
||||
and as a consequence, should not be displayed from catalogs'''
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
return get_subscription_info(
|
||||
None,
|
||||
# Sigec made this peri-sco activity available for functests
|
||||
'RESTAURATION ADULTE',
|
||||
'TEST RESTAURATION ADULTE 22/23',
|
||||
'DOLTO FRANCOISE MATERNELLE',
|
||||
conn,
|
||||
create_data2['name_id'],
|
||||
create_data2['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
|
|
@ -7,14 +7,6 @@
|
|||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUTO_OUT",
|
||||
"code": "AUTO_OUT",
|
||||
"text": "Autorisation de sortie - CLAE",
|
||||
"libelle": "Autorisation de sortie - CLAE",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUTRE",
|
||||
"code": "AUTRE",
|
||||
|
@ -24,30 +16,6 @@
|
|||
"isActive": true,
|
||||
"note": "rebellious"
|
||||
},
|
||||
{
|
||||
"id": "AUT_OUTADL",
|
||||
"code": "AUT_OUTADL",
|
||||
"text": "Autorisation de sortie - ADL",
|
||||
"libelle": "Autorisation de sortie - ADL",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUT_SANT",
|
||||
"code": "AUT_SANT",
|
||||
"text": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"libelle": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUT_TRANS",
|
||||
"code": "AUT_TRANS",
|
||||
"text": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"libelle": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AVL",
|
||||
"code": "AVL",
|
||||
|
@ -59,8 +27,8 @@
|
|||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Auxiliaire de Vie scolaire",
|
||||
"libelle": "Auxiliaire de Vie scolaire ",
|
||||
"text": "Assistant de Vie scolaire",
|
||||
"libelle": "Assistant de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
|
@ -73,14 +41,6 @@
|
|||
"isActive": false,
|
||||
"note": null
|
||||
},
|
||||
{
|
||||
"id": "HPURG",
|
||||
"code": "HPURG",
|
||||
"text": "Hospitalisation / musures d'urgence",
|
||||
"libelle": "Hospitalisation / musures d'urgence",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "LENTILLE",
|
||||
"code": "LENTILLE",
|
||||
|
|
|
@ -27,8 +27,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
@ -42,17 +42,7 @@
|
|||
"profession": null,
|
||||
"CAFInfo": null,
|
||||
"indicatorList": [],
|
||||
"quotientList": [
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2022-09-01T00:00:00+02:00",
|
||||
"dateEnd": "3000-08-31T00:00:00+02:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
}
|
||||
],
|
||||
"quotientList": [],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "M\u00e8re"
|
||||
|
@ -72,8 +62,7 @@
|
|||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
"cdDepartment_text": "CORREZE"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
|
@ -82,7 +71,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -104,7 +93,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -160,13 +149,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -1,408 +0,0 @@
|
|||
{
|
||||
"number": "N/A",
|
||||
"category": "BI",
|
||||
"situation": "MARI",
|
||||
"flagCom": false,
|
||||
"nbChild": 3,
|
||||
"nbTotalChild": 4,
|
||||
"nbAES": "1",
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
"lastname": "N/A",
|
||||
"maidenName": "BOUVIER",
|
||||
"quality": "MERE",
|
||||
"civility": "MME",
|
||||
"birth": {
|
||||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
"num": 742,
|
||||
"numComp": null,
|
||||
"street1": "Evergreen Terrace",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "90701"
|
||||
},
|
||||
"contact": {
|
||||
"phone": null,
|
||||
"mobile": null,
|
||||
"mail": null,
|
||||
"isContactMail": false,
|
||||
"isContactSms": false,
|
||||
"isInvoicePdf": false
|
||||
},
|
||||
"profession": null,
|
||||
"CAFInfo": null,
|
||||
"indicatorList": [],
|
||||
"quotientList": [
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2023-05-15T00:00:00+02:00",
|
||||
"dateEnd": "3000-12-31T00:00:00+01:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
}
|
||||
],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "M\u00e8re"
|
||||
},
|
||||
"RL2": {
|
||||
"num": "N/A",
|
||||
"firstname": "HOMER",
|
||||
"lastname": "N/A",
|
||||
"maidenName": null,
|
||||
"quality": "PERE",
|
||||
"civility": "MR",
|
||||
"birth": {
|
||||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
"num": 4,
|
||||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
"phone": "0122222222",
|
||||
"mobile": "0622222222",
|
||||
"mail": "homer.simpson@example.org.com",
|
||||
"isContactMail": true,
|
||||
"isContactSms": true,
|
||||
"isInvoicePdf": true
|
||||
},
|
||||
"profession": {
|
||||
"codeCSP": "46",
|
||||
"profession": "Inspecteur de s\u00e9curit\u00e9",
|
||||
"employerName": "Burns",
|
||||
"phone": "0133333333",
|
||||
"addressPro": {
|
||||
"num": null,
|
||||
"street": null,
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
},
|
||||
"situation": null,
|
||||
"weeklyHours": null,
|
||||
"codeCSP_text": "EMPLOYES"
|
||||
},
|
||||
"CAFInfo": {
|
||||
"number": "123",
|
||||
"organ": "GENE",
|
||||
"organ_text": "CAF 31"
|
||||
},
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "AVL",
|
||||
"libelle": "Auxiliaire de Vie loisirs",
|
||||
"note": null,
|
||||
"choice": null,
|
||||
"code_text": "Auxiliaire de Vie loisirs"
|
||||
},
|
||||
{
|
||||
"code": "ETABSPEC",
|
||||
"libelle": "Etablissement sp\u00e9cialis\u00e9",
|
||||
"note": "SNPP",
|
||||
"choice": null,
|
||||
"code_text": "Etablissement sp\u00e9cialis\u00e9"
|
||||
}
|
||||
],
|
||||
"quotientList": [],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MONSIEUR",
|
||||
"quality_text": "P\u00e8re"
|
||||
},
|
||||
"quotientList": [],
|
||||
"childList": [
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "BART",
|
||||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2014-04-01T00:00:00+02:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
"bLeaveAlone": true,
|
||||
"authorizedPersonList": [
|
||||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
"sexe": "M",
|
||||
"contact": {
|
||||
"phone": "0312345678",
|
||||
"mobile": null,
|
||||
"mail": "abe.simpson@example.org"
|
||||
},
|
||||
"civility_text": "MONSIEUR",
|
||||
"sexe_text": "Masculin"
|
||||
},
|
||||
"personQuality": {
|
||||
"code": "13",
|
||||
"libelle": "Famille",
|
||||
"code_text": "Famille"
|
||||
}
|
||||
},
|
||||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
"sexe": "F",
|
||||
"contact": {
|
||||
"phone": "0412345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "mona.simpson@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
},
|
||||
"personQuality": {
|
||||
"code": "13",
|
||||
"libelle": "Famille",
|
||||
"code_text": "Famille"
|
||||
}
|
||||
}
|
||||
],
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "AUTRE",
|
||||
"libelle": "Autre",
|
||||
"note": "rebellious",
|
||||
"choice": null,
|
||||
"code_text": "Autre"
|
||||
},
|
||||
{
|
||||
"code": "LUNETTE",
|
||||
"libelle": "Port de lunettes",
|
||||
"note": null,
|
||||
"choice": null,
|
||||
"code_text": "Port de lunettes"
|
||||
}
|
||||
],
|
||||
"medicalRecord": {
|
||||
"familyDoctor": {
|
||||
"name": "MONROE",
|
||||
"phone": "0612341234",
|
||||
"address": {
|
||||
"street1": "Alameda",
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
}
|
||||
},
|
||||
"allergy1": "butterscotch, imitation butterscotch, glow-in-the-dark monster make-up",
|
||||
"allergy2": "shrimp and cauliflower",
|
||||
"comment1": "the shrimp allergy isn't fully identified",
|
||||
"comment2": null,
|
||||
"observ1": "Ay Caramba!",
|
||||
"observ2": "Eat my shorts!",
|
||||
"isAuthHospital": false,
|
||||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_01",
|
||||
"dateDeb": "2022-09-01T00:00:00+02:00",
|
||||
"dateFin": "2023-07-01T00:00:00+02:00",
|
||||
"description": "mischievous, rebellious, misunderstood, disruptive",
|
||||
"code_text": "PAI Alimentaire Int\u00e9gral"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "Masculin",
|
||||
"dietcode_text": "Avec viande"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "LISA",
|
||||
"sexe": "F",
|
||||
"birth": {
|
||||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_SV",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_02",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Partiel"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "F\u00e9minin",
|
||||
"dietcode_text": "Sans viande"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "MAGGIE",
|
||||
"sexe": "F",
|
||||
"birth": {
|
||||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_02",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Partiel"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "F\u00e9minin",
|
||||
"dietcode_text": "Panier PAI"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "HUGO",
|
||||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_01",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Int\u00e9gral"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "Masculin",
|
||||
"dietcode_text": "Avec viande"
|
||||
}
|
||||
],
|
||||
"emergencyPersonList": [
|
||||
{
|
||||
"numPerson": "N/A",
|
||||
"civility": "MME",
|
||||
"firstname": "PATTY",
|
||||
"lastname": "BOUVIER",
|
||||
"dateBirth": "1948-08-30T00:00:00+01:00",
|
||||
"sexe": "F",
|
||||
"quality": "13",
|
||||
"contact": {
|
||||
"phone": "0112345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "patty.bouvier@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "Famille",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
},
|
||||
{
|
||||
"numPerson": "N/A",
|
||||
"civility": "MME",
|
||||
"firstname": "SELMA",
|
||||
"lastname": "BOUVIER",
|
||||
"dateBirth": "1946-04-29T00:00:00+01:00",
|
||||
"sexe": "F",
|
||||
"quality": "13",
|
||||
"contact": {
|
||||
"phone": "0112345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "selma.bouvier@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "Famille",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
}
|
||||
],
|
||||
"indicatorList": [],
|
||||
"childErrorList": [],
|
||||
"category_text": "BIPARENTALE",
|
||||
"situation_text": "MARIE(E)",
|
||||
"family_id": "N/A"
|
||||
}
|
|
@ -12,8 +12,7 @@
|
|||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
"cdDepartment_text": "CORREZE"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
|
|
@ -1,125 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": "INDI_APE_ENF",
|
||||
"text": "INDI_APE_ENF",
|
||||
"level": "INDI_APE_ENF",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO3",
|
||||
"libelle": "CF-0/1 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HBOTH",
|
||||
"libelle": "SP-handicap parent et fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HPAR",
|
||||
"libelle": "SP-handicap parents",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_MULTIACC",
|
||||
"libelle": "CF-2 enfants \u00e0 accueillir",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_SITUP",
|
||||
"libelle": "SP-situation particuli\u00e8re personne",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "INDI_APE_FAM",
|
||||
"text": "INDI_APE_FAM",
|
||||
"level": "INDI_APE_FAM",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO2",
|
||||
"libelle": "CF-1/2 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_COMPO4",
|
||||
"libelle": "CF-0/2 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_FIRSTC",
|
||||
"libelle": "CF-premier enfant",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HAND",
|
||||
"libelle": "H-handicap ou maladie chronique",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_NAIM",
|
||||
"libelle": "CF-naissance multiple",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "INDI_APE_RES",
|
||||
"text": "INDI_APE_RES",
|
||||
"level": "INDI_APE_RES",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO1",
|
||||
"libelle": "CF-100% actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_FRAT",
|
||||
"libelle": "CF-Fratrie d\u00e9j\u00e0 en accueil",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_KOFRAT",
|
||||
"libelle": "CF-sans proposition pour une partie de la fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HFRAT",
|
||||
"libelle": "SP-handicap fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_SPLOG",
|
||||
"libelle": "SP-situation particuli\u00e8re logement",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_ALLO",
|
||||
"libelle": "SP-accompagnement enfant allophone",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE-MINE",
|
||||
"libelle": "SP-parent mineur",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
|
@ -1,17 +1,9 @@
|
|||
[
|
||||
{
|
||||
"id": "AUT_OUTADL",
|
||||
"code": "AUT_OUTADL",
|
||||
"text": "Autorisation de sortie - ADL",
|
||||
"libelle": "Autorisation de sortie - ADL",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUTO_OUT",
|
||||
"code": "AUTO_OUT",
|
||||
"text": "Autorisation de sortie - CLAE",
|
||||
"libelle": "Autorisation de sortie - CLAE",
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Assistant de Vie scolaire",
|
||||
"libelle": "Assistant de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
|
@ -31,14 +23,6 @@
|
|||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Auxiliaire de Vie scolaire",
|
||||
"libelle": "Auxiliaire de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "ETABSPEC",
|
||||
"code": "ETABSPEC",
|
||||
|
@ -47,30 +31,6 @@
|
|||
"typeDesc": "NOTE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "HPURG",
|
||||
"code": "HPURG",
|
||||
"text": "Hospitalisation / musures d'urgence",
|
||||
"libelle": "Hospitalisation / musures d'urgence",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUT_SANT",
|
||||
"code": "AUT_SANT",
|
||||
"text": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"libelle": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUT_TRANS",
|
||||
"code": "AUT_TRANS",
|
||||
"text": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"libelle": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "MDPH",
|
||||
"code": "MDPH",
|
||||
|
|
|
@ -1,10 +1,4 @@
|
|||
[
|
||||
{
|
||||
"id": "MORAL",
|
||||
"code": "MORAL",
|
||||
"text": "",
|
||||
"libelle": null
|
||||
},
|
||||
{
|
||||
"id": "MME",
|
||||
"code": "MME",
|
||||
|
@ -16,5 +10,11 @@
|
|||
"code": "MR",
|
||||
"text": "MONSIEUR",
|
||||
"libelle": "MONSIEUR"
|
||||
},
|
||||
{
|
||||
"id": "MORAL",
|
||||
"code": "MORAL",
|
||||
"text": "MORAL",
|
||||
"libelle": "MORAL"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -1,11 +1,4 @@
|
|||
[
|
||||
{
|
||||
"id": "87",
|
||||
"code": "87",
|
||||
"rang": "PERSON",
|
||||
"text": "Acte de d\u00e9c\u00e8s",
|
||||
"libelle": "Acte de d\u00e9c\u00e8s"
|
||||
},
|
||||
{
|
||||
"id": "43",
|
||||
"code": "43",
|
||||
|
@ -195,13 +188,6 @@
|
|||
"text": "Certificat de scolarit\u00e9",
|
||||
"libelle": "Certificat de scolarit\u00e9"
|
||||
},
|
||||
{
|
||||
"id": "93",
|
||||
"code": "93",
|
||||
"rang": "PERSON",
|
||||
"text": "Certificat de travail",
|
||||
"libelle": "Certificat de travail"
|
||||
},
|
||||
{
|
||||
"id": "74",
|
||||
"code": "74",
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": "05DERO-8",
|
||||
"code": "05DERO-8",
|
||||
"text": "DERO05 - SANTE",
|
||||
"libelle": "DERO05 - SANTE"
|
||||
},
|
||||
{
|
||||
"id": "05DERO-6",
|
||||
"code": "05DERO-6",
|
||||
"text": "DERO05 - SANTE : SANTE / ORGANISATION",
|
||||
"libelle": "DERO05 - SANTE : SANTE / ORGANISATION"
|
||||
},
|
||||
{
|
||||
"id": "10DERO-2",
|
||||
"code": "10DERO-2",
|
||||
"text": "DERO10 - ORGANISATION",
|
||||
"libelle": "DERO10 - ORGANISATION"
|
||||
},
|
||||
{
|
||||
"id": "11DERO-1",
|
||||
"code": "11DERO-1",
|
||||
"text": "DERO11 - AUTRE",
|
||||
"libelle": "DERO11 - AUTRE"
|
||||
}
|
||||
]
|
|
@ -1,56 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": 102,
|
||||
"code": 102,
|
||||
"text": "CANTINE / CLAE",
|
||||
"libelle": "CANTINE / CLAE"
|
||||
},
|
||||
{
|
||||
"id": 103,
|
||||
"code": 103,
|
||||
"text": "CCAS",
|
||||
"libelle": "CCAS"
|
||||
},
|
||||
{
|
||||
"id": 101,
|
||||
"code": 101,
|
||||
"text": "DASC",
|
||||
"libelle": "DASC"
|
||||
},
|
||||
{
|
||||
"id": 104,
|
||||
"code": 104,
|
||||
"text": "DSCS",
|
||||
"libelle": "DSCS"
|
||||
},
|
||||
{
|
||||
"id": 105,
|
||||
"code": 105,
|
||||
"text": "ENFANCE LOISIRS",
|
||||
"libelle": "ENFANCE LOISIRS"
|
||||
},
|
||||
{
|
||||
"id": 106,
|
||||
"code": 106,
|
||||
"text": "PARCOURS EDUCATIFS",
|
||||
"libelle": "PARCOURS EDUCATIFS"
|
||||
},
|
||||
{
|
||||
"id": 107,
|
||||
"code": 107,
|
||||
"text": "REMBOURSEMENT",
|
||||
"libelle": "REMBOURSEMENT"
|
||||
},
|
||||
{
|
||||
"id": 108,
|
||||
"code": 108,
|
||||
"text": "SENIORS",
|
||||
"libelle": "SENIORS"
|
||||
},
|
||||
{
|
||||
"id": 109,
|
||||
"code": 109,
|
||||
"text": "SPORT",
|
||||
"libelle": "SPORT"
|
||||
}
|
||||
]
|
|
@ -1,4 +1,12 @@
|
|||
[
|
||||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Assistant de Vie scolaire",
|
||||
"libelle": "Assistant de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AVL",
|
||||
"code": "AVL",
|
||||
|
@ -7,14 +15,6 @@
|
|||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Auxiliaire de Vie scolaire",
|
||||
"libelle": "Auxiliaire de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "ETABSPEC",
|
||||
"code": "ETABSPEC",
|
||||
|
|
|
@ -1,92 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": "CE1",
|
||||
"age": 7,
|
||||
"code": "CE1",
|
||||
"text": "Cours \u00e9l\u00e9mentaire 1",
|
||||
"nature": null,
|
||||
"libelle": "Cours \u00e9l\u00e9mentaire 1",
|
||||
"numOrder": "6",
|
||||
"nextLevelCode": "CE2"
|
||||
},
|
||||
{
|
||||
"id": "CE2",
|
||||
"age": 8,
|
||||
"code": "CE2",
|
||||
"text": "Cours \u00e9l\u00e9mentaire 2",
|
||||
"nature": null,
|
||||
"libelle": "Cours \u00e9l\u00e9mentaire 2",
|
||||
"numOrder": "7",
|
||||
"nextLevelCode": "CM1"
|
||||
},
|
||||
{
|
||||
"id": "CM1",
|
||||
"age": 9,
|
||||
"code": "CM1",
|
||||
"text": "Cours moyen 1",
|
||||
"nature": null,
|
||||
"libelle": "Cours moyen 1",
|
||||
"numOrder": "8",
|
||||
"nextLevelCode": "CM2"
|
||||
},
|
||||
{
|
||||
"id": "CM2",
|
||||
"age": 10,
|
||||
"code": "CM2",
|
||||
"text": "Cours moyen 2",
|
||||
"nature": null,
|
||||
"libelle": "Cours moyen 2",
|
||||
"numOrder": "9",
|
||||
"nextLevelCode": null
|
||||
},
|
||||
{
|
||||
"id": "CP",
|
||||
"age": 6,
|
||||
"code": "CP",
|
||||
"text": "Cours pr\u00e9paratoire",
|
||||
"nature": null,
|
||||
"libelle": "Cours pr\u00e9paratoire",
|
||||
"numOrder": "5",
|
||||
"nextLevelCode": "CE1"
|
||||
},
|
||||
{
|
||||
"id": "GS",
|
||||
"age": 5,
|
||||
"code": "GS",
|
||||
"text": "Section grand",
|
||||
"nature": null,
|
||||
"libelle": "Section grand",
|
||||
"numOrder": "4",
|
||||
"nextLevelCode": "CP"
|
||||
},
|
||||
{
|
||||
"id": "MS",
|
||||
"age": 4,
|
||||
"code": "MS",
|
||||
"text": "Section moyen",
|
||||
"nature": null,
|
||||
"libelle": "Section moyen",
|
||||
"numOrder": "3",
|
||||
"nextLevelCode": "GS"
|
||||
},
|
||||
{
|
||||
"id": "PS",
|
||||
"age": 3,
|
||||
"code": "PS",
|
||||
"text": "Section petit",
|
||||
"nature": null,
|
||||
"libelle": "Section petit",
|
||||
"numOrder": "2",
|
||||
"nextLevelCode": "MS"
|
||||
},
|
||||
{
|
||||
"id": "TPS",
|
||||
"age": 2,
|
||||
"code": "TPS",
|
||||
"text": "Section tout petit",
|
||||
"nature": null,
|
||||
"libelle": "Section tout petit",
|
||||
"numOrder": "1",
|
||||
"nextLevelCode": "PS"
|
||||
}
|
||||
]
|
|
@ -1,20 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": 2022,
|
||||
"text": "2022",
|
||||
"schoolYear": 2022,
|
||||
"dateEndYearSchool": "2023-07-07T00:00:00+02:00",
|
||||
"dateStartYearSchool": "2022-09-01T00:00:00+02:00",
|
||||
"dateEndSubscribeSchool": "2023-09-01T00:00:00+02:00",
|
||||
"dateStartSubscribeSchool": "2022-09-01T00:00:00+02:00"
|
||||
},
|
||||
{
|
||||
"id": 2023,
|
||||
"text": "2023",
|
||||
"schoolYear": 2023,
|
||||
"dateEndYearSchool": "2024-07-07T00:00:00+02:00",
|
||||
"dateStartYearSchool": "2023-09-04T00:00:00+02:00",
|
||||
"dateEndSubscribeSchool": "2023-09-01T00:00:00+02:00",
|
||||
"dateStartSubscribeSchool": "2022-09-01T00:00:00+02:00"
|
||||
}
|
||||
]
|
|
@ -1,9 +1,33 @@
|
|||
[
|
||||
{
|
||||
"id": "105",
|
||||
"code": "105",
|
||||
"text": "AUTRE",
|
||||
"libelle": "AUTRE"
|
||||
},
|
||||
{
|
||||
"id": "30",
|
||||
"code": "30",
|
||||
"text": "BCG",
|
||||
"libelle": "BCG"
|
||||
"text": "B.C.G.",
|
||||
"libelle": "B.C.G."
|
||||
},
|
||||
{
|
||||
"id": "56",
|
||||
"code": "56",
|
||||
"text": "BOOSTRIX",
|
||||
"libelle": "BOOSTRIX"
|
||||
},
|
||||
{
|
||||
"id": "27",
|
||||
"code": "27",
|
||||
"text": "CHOLERA",
|
||||
"libelle": "CHOLERA"
|
||||
},
|
||||
{
|
||||
"id": "48",
|
||||
"code": "48",
|
||||
"text": "Contr\u00f4le B.C.G.",
|
||||
"libelle": "Contr\u00f4le B.C.G."
|
||||
},
|
||||
{
|
||||
"id": "3",
|
||||
|
@ -17,17 +41,107 @@
|
|||
"text": "DIPHTERIE",
|
||||
"libelle": "DIPHTERIE"
|
||||
},
|
||||
{
|
||||
"id": "6",
|
||||
"code": "6",
|
||||
"text": "DIPHTERIE TETANOS",
|
||||
"libelle": "DIPHTERIE TETANOS"
|
||||
},
|
||||
{
|
||||
"id": "9",
|
||||
"code": "9",
|
||||
"text": "DIPHT TETANOS COQ",
|
||||
"libelle": "DIPHT TETANOS COQ"
|
||||
},
|
||||
{
|
||||
"id": "19",
|
||||
"code": "19",
|
||||
"text": "DT BISRUDIVAX",
|
||||
"libelle": "DT BISRUDIVAX"
|
||||
},
|
||||
{
|
||||
"id": "10",
|
||||
"code": "10",
|
||||
"text": "DT COQ POLIO",
|
||||
"libelle": "DT COQ POLIO"
|
||||
},
|
||||
{
|
||||
"id": "13",
|
||||
"code": "13",
|
||||
"text": "DT COQ POLIO IPAD",
|
||||
"libelle": "DT COQ POLIO IPAD"
|
||||
},
|
||||
{
|
||||
"id": "8",
|
||||
"code": "8",
|
||||
"text": "DTPOLIO",
|
||||
"libelle": "DTPOLIO"
|
||||
"text": "DT POLIO",
|
||||
"libelle": "DT POLIO"
|
||||
},
|
||||
{
|
||||
"id": "45",
|
||||
"code": "45",
|
||||
"text": "DT TETANOS COQ",
|
||||
"libelle": "DT TETANOS COQ"
|
||||
},
|
||||
{
|
||||
"id": "11",
|
||||
"code": "11",
|
||||
"text": "DT TYPHOIDE",
|
||||
"libelle": "DT TYPHOIDE"
|
||||
},
|
||||
{
|
||||
"id": "129",
|
||||
"code": "129",
|
||||
"text": "ENGERIX",
|
||||
"libelle": "ENGERIX"
|
||||
},
|
||||
{
|
||||
"id": "26",
|
||||
"code": "26",
|
||||
"text": "FIEVRE JAUNE",
|
||||
"libelle": "FIEVRE JAUNE"
|
||||
},
|
||||
{
|
||||
"id": "4",
|
||||
"code": "4",
|
||||
"text": "F.TYPHOIDES",
|
||||
"libelle": "F.TYPHOIDES"
|
||||
},
|
||||
{
|
||||
"id": "144",
|
||||
"code": "144",
|
||||
"text": "GRIPPE",
|
||||
"libelle": "GRIPPE"
|
||||
},
|
||||
{
|
||||
"id": "143",
|
||||
"code": "143",
|
||||
"text": "HAEMOPHILUS HIB",
|
||||
"libelle": "HAEMOPHILUS HIB"
|
||||
},
|
||||
{
|
||||
"id": "17",
|
||||
"code": "17",
|
||||
"text": "HAVRIX",
|
||||
"libelle": "HAVRIX"
|
||||
},
|
||||
{
|
||||
"id": "29",
|
||||
"code": "29",
|
||||
"text": "HEPATITEB",
|
||||
"libelle": "HEPATITEB"
|
||||
"text": "HEPATITE B",
|
||||
"libelle": "HEPATITE B"
|
||||
},
|
||||
{
|
||||
"id": "146",
|
||||
"code": "146",
|
||||
"text": "HEXAXIM",
|
||||
"libelle": "HEXAXIM"
|
||||
},
|
||||
{
|
||||
"id": "59",
|
||||
"code": "59",
|
||||
"text": "HEXYON",
|
||||
"libelle": "HEXYON"
|
||||
},
|
||||
{
|
||||
"id": "16",
|
||||
|
@ -36,28 +150,226 @@
|
|||
"libelle": "HIB"
|
||||
},
|
||||
{
|
||||
"id": "152",
|
||||
"code": "152",
|
||||
"text": "IIP",
|
||||
"libelle": "IIP"
|
||||
"id": "24",
|
||||
"code": "24",
|
||||
"text": "IMOVAX OREILLONS",
|
||||
"libelle": "IMOVAX OREILLONS"
|
||||
},
|
||||
{
|
||||
"id": "151",
|
||||
"code": "151",
|
||||
"text": "MENINGOCOQUE",
|
||||
"libelle": "MENINGOCOQUE"
|
||||
"id": "121",
|
||||
"code": "121",
|
||||
"text": "INFANRIX",
|
||||
"libelle": "INFANRIX"
|
||||
},
|
||||
{
|
||||
"id": "150",
|
||||
"code": "150",
|
||||
"text": "POLIO",
|
||||
"libelle": "POLIO"
|
||||
"id": "52",
|
||||
"code": "52",
|
||||
"text": "INFANRIX HEXA",
|
||||
"libelle": "INFANRIX HEXA"
|
||||
},
|
||||
{
|
||||
"id": "32",
|
||||
"code": "32",
|
||||
"text": "INFANRIX POLIO",
|
||||
"libelle": "INFANRIX POLIO"
|
||||
},
|
||||
{
|
||||
"id": "33",
|
||||
"code": "33",
|
||||
"text": "INFANRIX POLIO HIB",
|
||||
"libelle": "INFANRIX POLIO HIB"
|
||||
},
|
||||
{
|
||||
"id": "51",
|
||||
"code": "51",
|
||||
"text": "INFANRIX QUINTA",
|
||||
"libelle": "INFANRIX QUINTA"
|
||||
},
|
||||
{
|
||||
"id": "55",
|
||||
"code": "55",
|
||||
"text": "INFANRIX TETRA",
|
||||
"libelle": "INFANRIX TETRA"
|
||||
},
|
||||
{
|
||||
"id": "147",
|
||||
"code": "147",
|
||||
"text": "INFLUVAC TETRA",
|
||||
"libelle": "INFLUVAC TETRA"
|
||||
},
|
||||
{
|
||||
"id": "137",
|
||||
"code": "137",
|
||||
"text": "INNUGRIP",
|
||||
"libelle": "INNUGRIP"
|
||||
},
|
||||
{
|
||||
"id": "18",
|
||||
"code": "18",
|
||||
"text": "LEPTOSPIROSE",
|
||||
"libelle": "LEPTOSPIROSE"
|
||||
},
|
||||
{
|
||||
"id": "22",
|
||||
"code": "22",
|
||||
"text": "MENINGITE",
|
||||
"libelle": "MENINGITE"
|
||||
},
|
||||
{
|
||||
"id": "130",
|
||||
"code": "130",
|
||||
"text": "MENINGITEC",
|
||||
"libelle": "MENINGITEC"
|
||||
},
|
||||
{
|
||||
"id": "123",
|
||||
"code": "123",
|
||||
"text": "MENINVAC",
|
||||
"libelle": "MENINVAC"
|
||||
},
|
||||
{
|
||||
"id": "120",
|
||||
"code": "120",
|
||||
"text": "MENINVACT",
|
||||
"libelle": "MENINVACT"
|
||||
},
|
||||
{
|
||||
"id": "139",
|
||||
"code": "139",
|
||||
"text": "MENJUGATE",
|
||||
"libelle": "MENJUGATE"
|
||||
},
|
||||
{
|
||||
"id": "149",
|
||||
"code": "149",
|
||||
"text": "M-M RVAX PRO",
|
||||
"libelle": "M-M RVAX PRO"
|
||||
},
|
||||
{
|
||||
"id": "133",
|
||||
"code": "133",
|
||||
"text": "MONOTEST",
|
||||
"libelle": "MONOTEST"
|
||||
},
|
||||
{
|
||||
"id": "124",
|
||||
"code": "124",
|
||||
"text": "MONOVAX",
|
||||
"libelle": "MONOVAX"
|
||||
},
|
||||
{
|
||||
"id": "132",
|
||||
"code": "132",
|
||||
"text": "NEISVAC",
|
||||
"libelle": "NEISVAC"
|
||||
},
|
||||
{
|
||||
"id": "110",
|
||||
"code": "110",
|
||||
"text": "OTITE",
|
||||
"libelle": "OTITE"
|
||||
},
|
||||
{
|
||||
"id": "134",
|
||||
"code": "134",
|
||||
"text": "PANENZA",
|
||||
"libelle": "PANENZA"
|
||||
},
|
||||
{
|
||||
"id": "31",
|
||||
"code": "31",
|
||||
"text": "PENTACOQ",
|
||||
"libelle": "PENTACOQ"
|
||||
},
|
||||
{
|
||||
"id": "53",
|
||||
"code": "53",
|
||||
"text": "PENTAVAC",
|
||||
"libelle": "PENTAVAC"
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
"code": "2",
|
||||
"text": "POLIOMYELITE",
|
||||
"libelle": "POLIOMYELITE"
|
||||
},
|
||||
{
|
||||
"id": "128",
|
||||
"code": "128",
|
||||
"text": "PREVENAR",
|
||||
"libelle": "PREVENAR"
|
||||
},
|
||||
{
|
||||
"id": "125",
|
||||
"code": "125",
|
||||
"text": "PRIORIX",
|
||||
"libelle": "PRIORIX"
|
||||
},
|
||||
{
|
||||
"id": "54",
|
||||
"code": "54",
|
||||
"text": "REPEVAX",
|
||||
"libelle": "REPEVAX"
|
||||
},
|
||||
{
|
||||
"id": "47",
|
||||
"code": "47",
|
||||
"text": "REVAXIS",
|
||||
"libelle": "REVAXIS"
|
||||
},
|
||||
{
|
||||
"id": "28",
|
||||
"code": "28",
|
||||
"text": "ROR",
|
||||
"libelle": "ROR"
|
||||
"text": "R O R",
|
||||
"libelle": "R O R"
|
||||
},
|
||||
{
|
||||
"id": "127",
|
||||
"code": "127",
|
||||
"text": "ROR VAX",
|
||||
"libelle": "ROR VAX"
|
||||
},
|
||||
{
|
||||
"id": "135",
|
||||
"code": "135",
|
||||
"text": "ROTARIX",
|
||||
"libelle": "ROTARIX"
|
||||
},
|
||||
{
|
||||
"id": "20",
|
||||
"code": "20",
|
||||
"text": "ROUVAX",
|
||||
"libelle": "ROUVAX"
|
||||
},
|
||||
{
|
||||
"id": "23",
|
||||
"code": "23",
|
||||
"text": "RUDI ROUVAX",
|
||||
"libelle": "RUDI ROUVAX"
|
||||
},
|
||||
{
|
||||
"id": "21",
|
||||
"code": "21",
|
||||
"text": "RUDIVAX",
|
||||
"libelle": "RUDIVAX"
|
||||
},
|
||||
{
|
||||
"id": "113",
|
||||
"code": "113",
|
||||
"text": "SCARLATINE",
|
||||
"libelle": "SCARLATINE"
|
||||
},
|
||||
{
|
||||
"id": "14",
|
||||
"code": "14",
|
||||
"text": "SERUM ANTI-TETANIQUE",
|
||||
"libelle": "SERUM ANTI-TETANIQUE"
|
||||
},
|
||||
{
|
||||
"id": "141",
|
||||
"code": "141",
|
||||
"text": "SYNAGIS",
|
||||
"libelle": "SYNAGIS"
|
||||
},
|
||||
{
|
||||
"id": "1",
|
||||
|
@ -65,10 +377,46 @@
|
|||
"text": "TETANOS",
|
||||
"libelle": "TETANOS"
|
||||
},
|
||||
{
|
||||
"id": "7",
|
||||
"code": "7",
|
||||
"text": "TETANOS POLIO",
|
||||
"libelle": "TETANOS POLIO"
|
||||
},
|
||||
{
|
||||
"id": "12",
|
||||
"code": "12",
|
||||
"text": "TETRACOQ",
|
||||
"libelle": "TETRACOQ"
|
||||
"text": "TETRA COQ",
|
||||
"libelle": "TETRA COQ"
|
||||
},
|
||||
{
|
||||
"id": "46",
|
||||
"code": "46",
|
||||
"text": "TETRAVAC ACELLULAIRE",
|
||||
"libelle": "TETRAVAC ACELLULAIRE"
|
||||
},
|
||||
{
|
||||
"id": "107",
|
||||
"code": "107",
|
||||
"text": "VARICELLE",
|
||||
"libelle": "VARICELLE"
|
||||
},
|
||||
{
|
||||
"id": "15",
|
||||
"code": "15",
|
||||
"text": "VARIOLE",
|
||||
"libelle": "VARIOLE"
|
||||
},
|
||||
{
|
||||
"id": "34",
|
||||
"code": "34",
|
||||
"text": "VAXELIS",
|
||||
"libelle": "VAXELIS"
|
||||
},
|
||||
{
|
||||
"id": "148",
|
||||
"code": "148",
|
||||
"text": "VAXIGRIP",
|
||||
"libelle": "VAXIGRIP"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -10,8 +10,8 @@
|
|||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Auxiliaire de Vie scolaire",
|
||||
"libelle": "Auxiliaire de Vie scolaire ",
|
||||
"text": "Assistant de Vie scolaire",
|
||||
"libelle": "Assistant de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -40,7 +40,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -96,13 +96,13 @@
|
|||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -1,31 +0,0 @@
|
|||
{
|
||||
"familyDoctor": {
|
||||
"name": "HIBBERT",
|
||||
"phone": "0656785678",
|
||||
"address": {
|
||||
"street1": "General Hospital",
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
}
|
||||
},
|
||||
"allergy1": null,
|
||||
"allergy2": null,
|
||||
"comment1": null,
|
||||
"comment2": null,
|
||||
"observ1": null,
|
||||
"observ2": null,
|
||||
"isAuthHospital": true,
|
||||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -10,13 +10,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
|
|
@ -27,8 +27,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
@ -61,8 +61,7 @@
|
|||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
"cdDepartment_text": "CORREZE"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
@ -136,8 +135,7 @@
|
|||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
"cdDepartment_text": "CORREZE"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
|
@ -146,7 +144,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -168,7 +166,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -224,13 +222,13 @@
|
|||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
"number": "N/A",
|
||||
"category": "AUTR",
|
||||
"situation": "AUTR",
|
||||
"flagCom": false,
|
||||
"flagCom": true,
|
||||
"nbChild": 0,
|
||||
"nbTotalChild": 0,
|
||||
"nbAES": "0",
|
||||
|
@ -153,13 +153,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -4,33 +4,24 @@
|
|||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
},
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-01T00:00:00+01:00",
|
||||
"dateEnd": "2022-01-01T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
},
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2022-09-01T00:00:00+02:00",
|
||||
"dateEnd": "3000-08-31T00:00:00+02:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
|
|
|
@ -18,8 +18,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
|
|
@ -5,9 +5,8 @@ from .conftest import diff
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'ref',
|
||||
"ref",
|
||||
[
|
||||
'ape-indicators',
|
||||
'category',
|
||||
'child-indicator',
|
||||
'civility',
|
||||
|
@ -16,16 +15,11 @@ from .conftest import diff
|
|||
'csp',
|
||||
'dietcode',
|
||||
'document',
|
||||
'exemption-reasons',
|
||||
#'nursery',
|
||||
'organ',
|
||||
'pai',
|
||||
'quality',
|
||||
'quotient',
|
||||
#'regie',
|
||||
'rl-indicator',
|
||||
'school-levels',
|
||||
'school-years',
|
||||
'situation',
|
||||
'street',
|
||||
'vaccin',
|
||||
|
@ -41,5 +35,5 @@ def test_referentials(conn, referentials, ref):
|
|||
for item in res['data']:
|
||||
assert 'id' in item
|
||||
assert 'text' in item
|
||||
if ref not in ['street', 'county', 'nursery']:
|
||||
if ref not in ['street', 'county']:
|
||||
assert diff(res['data'], 'test_read_%s_list.json' % ref)
|
||||
|
|
|
@ -36,7 +36,7 @@ def test_link(conn, update_data):
|
|||
res = resp.json()
|
||||
assert res['err'] == 1
|
||||
assert res['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
assert 'E02 : Le dossier numéro [999999] ne correspond à aucune famille' in res['err_desc']
|
||||
assert "E02 : Le dossier numéro [999999] ne correspond à aucune famille" in res['err_desc']
|
||||
|
||||
# wrong DUI firstname
|
||||
payload = {
|
||||
|
|
|
@ -15,7 +15,7 @@ FAMILY_RESET_PAYLOAD = {
|
|||
'rl1': {
|
||||
'civility': 'MR', # no effect
|
||||
'firstname': 'Marge', # must be
|
||||
'lastname': 'Test_Simpson', # must be
|
||||
'lastname': 'Simpson', # must be
|
||||
'maidenName': 'reset', # no effect
|
||||
'quality': 'AU',
|
||||
'birth': {
|
||||
|
@ -27,7 +27,7 @@ FAMILY_RESET_PAYLOAD = {
|
|||
'rl2': {
|
||||
'civility': 'MME', # no effect
|
||||
'firstname': 'Homer', # must be
|
||||
'lastname': 'Test_Simpson', # must be
|
||||
'lastname': 'Simpson', # must be
|
||||
'quality': 'AU',
|
||||
'birth': {
|
||||
'dateBirth': '1956-05-12', # must be
|
||||
|
@ -236,18 +236,13 @@ def test_update_family(conn, update_data):
|
|||
|
||||
def test_create_family(conn, create_data, update_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
|
||||
# search the 'Test_Simpson' default test family
|
||||
resp = requests.get(conn + '/search-family?q=Test_Simpson')
|
||||
resp.raise_for_status()
|
||||
assert len(resp.json()['data']) >= 1
|
||||
assert any(data['RL1']['lastname'] == 'TEST_SIMPSON' for data in resp.json()['data'])
|
||||
|
||||
link(conn, create_data)
|
||||
url = conn + '/create-family?NameID=%s' % create_data['name_id']
|
||||
|
||||
# RL1 already exists (on update_data) error
|
||||
unlink(conn, create_data['name_id'])
|
||||
payload = copy.deepcopy(create_data['family_payload'])
|
||||
payload['rl1']['lastname'] = 'Test_Simpson'
|
||||
payload['rl1']['lastname'] = 'Simpson'
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
@ -268,7 +263,7 @@ def test_create_family(conn, create_data, update_data):
|
|||
|
||||
def test_is_rl_exists(conn, update_data):
|
||||
url = conn + '/is-rl-exists'
|
||||
payload = {'firstname': 'Marge', 'lastname': 'Test_Simpson', 'dateBirth': '1950-10-01'}
|
||||
payload = {'firstname': 'Marge', 'lastname': 'Simpson', 'dateBirth': '1950-10-01'}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'err': 0, 'data': True}
|
||||
|
@ -285,7 +280,7 @@ def test_is_rl_exists(conn, update_data):
|
|||
assert resp.json() == {'err': 0, 'data': False}
|
||||
|
||||
# test on rl2
|
||||
payload = {'firstname': 'Homer', 'lastname': 'Test_Simpson', 'dateBirth': '1956-05-12'}
|
||||
payload = {'firstname': 'Homer', 'lastname': 'Simpson', 'dateBirth': '1956-05-12'}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'err': 0, 'data': True}
|
||||
|
@ -309,7 +304,7 @@ def test_create_rl2(conn, create_data, update_data):
|
|||
assert diff_rlg(conn, create_data['name_id'], 2, 'test_create_rl2.json')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('rl', ['1', '2'])
|
||||
@pytest.mark.parametrize("rl", ['1', '2'])
|
||||
def test_update_rlg(conn, update_data, rl):
|
||||
rlg = 'rl' + rl
|
||||
RLG = 'RL' + rl
|
||||
|
@ -370,7 +365,7 @@ def test_update_rlg(conn, update_data, rl):
|
|||
in res['err_desc']
|
||||
)
|
||||
else:
|
||||
assert 'La date de naissance ne peut pas être modifiée' in res['err_desc']
|
||||
assert "La date de naissance ne peut pas être modifiée" in res['err_desc']
|
||||
|
||||
# restore RL1
|
||||
payload = copy.deepcopy(update_data['family_payload'][rlg])
|
||||
|
@ -459,7 +454,7 @@ def test_create_child(conn, create_data, update_data):
|
|||
assert 'E65 : Il existe déjà un enfant correspondant' in res['err_desc']
|
||||
|
||||
# child already exists error (Lisa form update_data)
|
||||
payload['lastname'] = 'Test_Simpson'
|
||||
payload['lastname'] = 'Simpson'
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
@ -618,24 +613,6 @@ def test_update_child_medical_record(conn, update_data):
|
|||
update_data['bart_num'],
|
||||
)
|
||||
|
||||
# update only doctor
|
||||
# #2720: allergies comments, and observations are erased
|
||||
payload = {
|
||||
'familyDoctor': {
|
||||
'name': 'Hibbert',
|
||||
'phone': '0656785678',
|
||||
'address': {
|
||||
'street1': 'General Hospital',
|
||||
'zipcode': '90701',
|
||||
'town': 'Springfield',
|
||||
},
|
||||
},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert diff_child(conn, update_data['name_id'], 0, 'test_update_child_doctor.json', key='medicalRecord')
|
||||
|
||||
# reset medical record
|
||||
payload = FAMILY_RESET_PAYLOAD['childList'][0]['medicalRecord']
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -799,22 +776,21 @@ def test_update_quotient(conn, create_data):
|
|||
'dateStart': '2022-01-01',
|
||||
'dateEnd': '2022-12-31',
|
||||
'mtt': '1500.33',
|
||||
'cdquo': '2',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = read_family(conn, create_data['name_id'])
|
||||
assert len(data['RL1']['quotientList']) == 2
|
||||
assert data['RL1']['quotients']['2'] == [
|
||||
assert data['RL1']['quotientList'] == [
|
||||
{
|
||||
'yearRev': 2021,
|
||||
'dateStart': '2022-01-01T00:00:00+01:00',
|
||||
'dateEnd': '2022-12-31T00:00:00+01:00',
|
||||
'mtt': 1500.33,
|
||||
'cdquo': '2',
|
||||
'cdquo': '1',
|
||||
'codeUti': None,
|
||||
'cdquo_text': 'Revenus Petite enfance',
|
||||
'cdquo_text': 'Revenus fiscaux',
|
||||
}
|
||||
]
|
||||
|
||||
|
@ -824,7 +800,7 @@ def test_update_quotient(conn, create_data):
|
|||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = read_family(conn, create_data['name_id'])
|
||||
assert len(data['RL1']['quotients']['2']) == 2
|
||||
assert len(data['RL1']['quotientList']) == 2
|
||||
|
||||
# add quotient on another income year
|
||||
payload['yearRev'] = '2020'
|
||||
|
@ -832,7 +808,7 @@ def test_update_quotient(conn, create_data):
|
|||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = diff_rlg(conn, create_data['name_id'], 1, 'test_update_quotient.json', 'quotientList')
|
||||
assert len(data['RL1']['quotients']['2']) == 3
|
||||
assert len(data['RL1']['quotientList']) == 3
|
||||
|
||||
# test read-family with reference year
|
||||
url = conn + '/read-family?NameID=%s&income_year=%s' % (create_data['name_id'], '2020')
|
||||
|
@ -932,7 +908,7 @@ def test_read_family_members(conn, update_data):
|
|||
assert res['data']['personInfo']['firstname'] == 'ABRAHAM JEBEDIAH'
|
||||
|
||||
|
||||
def test_supplied_document(conn, create_data):
|
||||
def test_add_supplied_document(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
|
@ -940,8 +916,6 @@ def test_supplied_document(conn, create_data):
|
|||
payload = {
|
||||
'documentList/0/code': '46',
|
||||
'documentList/0/depositDate': '2022-12-20',
|
||||
'documentList/0/visaDate': '2022-12-21',
|
||||
'documentList/0/validityDate': '2022-12-22',
|
||||
'documentList/0/file': { # w.c.s. file field
|
||||
'filename': '201x201.jpg',
|
||||
'content_type': 'image/jpeg',
|
||||
|
@ -955,7 +929,6 @@ def test_supplied_document(conn, create_data):
|
|||
assert res['err'] == 0
|
||||
|
||||
# push on RL
|
||||
payload['documentList/0/code'] = '85'
|
||||
payload['numPerson'] = create_data['rl1_num']
|
||||
url = conn + '/add-supplied-document?NameID=%s' % create_data['name_id']
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -964,43 +937,9 @@ def test_supplied_document(conn, create_data):
|
|||
assert res['err'] == 0
|
||||
|
||||
# push on child
|
||||
payload['documentList/0/code'] = '69'
|
||||
payload['numPerson'] = create_data['bart_num']
|
||||
url = conn + '/add-supplied-document?NameID=%s' % create_data['name_id']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on family
|
||||
params = {
|
||||
'code': '46',
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
url = conn + '/read-supplied-document-validity?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on RL
|
||||
params = {
|
||||
'code': '85',
|
||||
'person_id': create_data['rl1_num'],
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on child
|
||||
params = {
|
||||
'code': '69',
|
||||
'person_id': create_data['bart_num'],
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
|
|
@ -0,0 +1,174 @@
|
|||
import datetime
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
def test_perisco(perisco_subscribe_info):
|
||||
assert perisco_subscribe_info['info']['activity']['libelle1'] == 'TEMPS DU MIDI 22/23'
|
||||
|
||||
|
||||
def test_perisco_agenda(conn, create_data, perisco_subscribe_info):
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# find first available booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
booking = None
|
||||
for booking in resp.json()['data']:
|
||||
if booking['disabled'] is False:
|
||||
break
|
||||
else:
|
||||
raise Exception("no booking available")
|
||||
assert booking['details']['activity_id'] == perisco_subscribe_info['activity']['id']
|
||||
assert booking['prefill'] is False
|
||||
|
||||
# book activity
|
||||
url = conn + '/update-child-agenda?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [booking['id']],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {
|
||||
'updated': True,
|
||||
'count': 1,
|
||||
'changes': [
|
||||
{
|
||||
'booked': True,
|
||||
'activity_id': booking['details']['activity_id'],
|
||||
'activity_label': 'Restauration scolaire',
|
||||
'day': booking['details']['day_str'],
|
||||
}
|
||||
],
|
||||
'err': 0,
|
||||
}
|
||||
|
||||
# check booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_recurrent_week(conn, create_data, perisco_subscribe_info, reference_year):
|
||||
# no subscribed activity
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 0
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['id'] == perisco_subscribe_info['activity']['id']
|
||||
|
||||
# get recurent-week gabarit
|
||||
url = conn + '/get-recurrent-week?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [(x['id'], x['day']) for x in resp.json()['data']] == [
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
]
|
||||
|
||||
# no booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['maggie_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert not any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# set recurent-week gabarit
|
||||
url = conn + '/update-recurrent-week?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
|
||||
# there is now some bookings
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['maggie_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
|
@ -1,192 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
|
||||
def test_create_nursery_demand_on_existing_child(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/get-nursery-geojson'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
nurseries = resp.json()['features']
|
||||
assert len(nurseries) >= 2
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
nb_childs = len(res['data']['childList'])
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == []
|
||||
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], create_data['maggie_num'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == []
|
||||
|
||||
url = conn + '/create-nursery-demand'
|
||||
payload = {
|
||||
'family_id': create_data['family_id'],
|
||||
'family_indicators/0/code': 'APE_FIRSTC',
|
||||
'family_indicators/0/isActive': True,
|
||||
'child_id': create_data['maggie_num'],
|
||||
'demand_indicators/0/code': 'APE_COMPO1',
|
||||
'demand_indicators/0/isActive': True,
|
||||
'start_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
'number_of_days': '2',
|
||||
'start_hour_Mon': '08:00',
|
||||
'end_hour_Mon': '',
|
||||
'comment': 'bla',
|
||||
'accept_other_nurseries': True,
|
||||
'nursery1/idActivity': nurseries[0]['properties']['activity_id'],
|
||||
'nursery1/idUnit': nurseries[0]['properties']['unit_id'],
|
||||
'nursery1/idPlace': nurseries[0]['properties']['place_id'],
|
||||
'nursery2/idActivity': nurseries[1]['properties']['activity_id'],
|
||||
'nursery2/idUnit': nurseries[1]['properties']['unit_id'],
|
||||
'nursery2/idPlace': nurseries[1]['properties']['place_id'],
|
||||
'nursery3/idActivity': '',
|
||||
'nursery3/idUnit': '',
|
||||
'nursery3/idPlace': '',
|
||||
# indicators
|
||||
'child_indicators/0/code': 'APE_HBOTH',
|
||||
'child_indicators/0/isActive': True,
|
||||
'child_indicators/1/code': 'APE_HPAR',
|
||||
'child_indicators/1/isActive': True,
|
||||
'child_indicators/2/code': 'APE_COMPO3',
|
||||
'child_indicators/2/isActive': True,
|
||||
'child_indicators/3/code': 'APE_MULTIACC',
|
||||
'child_indicators/3/isActive': True,
|
||||
'family_indicators/0/code': 'APE_COMPO4',
|
||||
'family_indicators/0/isActive': True,
|
||||
'family_indicators/1/code': 'APE_NAIM',
|
||||
'family_indicators/1/isActive': True,
|
||||
'family_indicators/2/code': 'APE_FIRSTC',
|
||||
'family_indicators/2/isActive': True,
|
||||
'family_indicators/3/code': 'APE_COMPO2',
|
||||
'family_indicators/3/isActive': True,
|
||||
'family_indicators/4/code': 'APE_HAND',
|
||||
'family_indicators/4/isActive': True,
|
||||
'demand_indicators/0/code': 'APE_FRAT',
|
||||
'demand_indicators/0/isActive': True,
|
||||
'demand_indicators/1/code': 'APE_COMPO1',
|
||||
'demand_indicators/1/isActive': True,
|
||||
'demand_indicators/2/code': 'APE_HFRAT',
|
||||
'demand_indicators/2/isActive': True,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'data': None, 'err': 0}
|
||||
|
||||
# no child added
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert len(res['data']['childList']) == nb_childs
|
||||
|
||||
# check indicators
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO2',
|
||||
'APE_COMPO4',
|
||||
'APE_FIRSTC',
|
||||
'APE_HAND',
|
||||
'APE_NAIM',
|
||||
]
|
||||
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], create_data['maggie_num'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO3',
|
||||
'APE_HBOTH',
|
||||
'APE_HPAR',
|
||||
'APE_MULTIACC',
|
||||
]
|
||||
|
||||
|
||||
def test_create_nursery_demand_adding_new_child(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/get-nursery-geojson'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
nurseries = resp.json()['features']
|
||||
assert len(nurseries) >= 2
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
nb_childs = len(res['data']['childList'])
|
||||
assert 'NELSON' not in [x['firstname'] for x in res['data']['childList']]
|
||||
|
||||
url = conn + '/create-nursery-demand'
|
||||
payload = {
|
||||
'family_id': create_data['family_id'],
|
||||
'child_first_name': 'Nelson',
|
||||
'child_last_name': 'Muntz',
|
||||
'child_birthdate': '2013-10-31',
|
||||
'child_gender': 'G',
|
||||
'start_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
'nursery1/idActivity': nurseries[0]['properties']['activity_id'],
|
||||
'nursery1/idUnit': nurseries[0]['properties']['unit_id'],
|
||||
'nursery1/idPlace': nurseries[0]['properties']['place_id'],
|
||||
'nursery2/idActivity': nurseries[1]['properties']['activity_id'],
|
||||
'nursery2/idUnit': nurseries[1]['properties']['unit_id'],
|
||||
'nursery2/idPlace': nurseries[1]['properties']['place_id'],
|
||||
'nursery3/idActivity': '',
|
||||
'nursery3/idUnit': '',
|
||||
'nursery3/idPlace': '',
|
||||
# indicators
|
||||
'child_indicators/0/code': 'APE_HBOTH',
|
||||
'child_indicators/0/isActive': True,
|
||||
'child_indicators/1/code': 'APE_HPAR',
|
||||
'child_indicators/1/isActive': True,
|
||||
'child_indicators/2/code': 'APE_COMPO3',
|
||||
'child_indicators/2/isActive': True,
|
||||
'child_indicators/3/code': 'APE_MULTIACC',
|
||||
'child_indicators/3/isActive': True,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert res['err'] == 0
|
||||
child_id = resp.json()['data']
|
||||
assert child_id is not None
|
||||
|
||||
# a new child is created on family
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert len(res['data']['childList']) == nb_childs + 1
|
||||
assert 'NELSON' in [x['firstname'] for x in res['data']['childList']]
|
||||
assert res['data']['childList'][nb_childs]['num'] == child_id
|
||||
|
||||
# check child indicators
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], child_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert res['data']['firstname'] == 'NELSON'
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO3',
|
||||
'APE_HBOTH',
|
||||
'APE_HPAR',
|
||||
'APE_MULTIACC',
|
||||
]
|
|
@ -0,0 +1,24 @@
|
|||
import datetime
|
||||
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
# LOISIR is a subset of EXTRACO, we only test the genaral catalog cell here
|
||||
|
||||
|
||||
def test_catalog_general_loisirs(conn, update_data):
|
||||
unlink(conn, update_data['name_id'])
|
||||
link(conn, update_data)
|
||||
url = conn + '/read-activity-list'
|
||||
params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}
|
||||
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
[x['text'] for x in resp.json()['data']] == [
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS',
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 14h/16h30 - 10/15Ans, LA RAMEE',
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 15h30/17h - 8/15Ans, ARGOULETS',
|
||||
]
|
|
@ -1,308 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def school_year(conn):
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
year = res['data'][0]['text']
|
||||
return year
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def exemption(conn):
|
||||
# get an exemption code
|
||||
url = conn + '/read-exemption-reasons-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
return res['data'][0]['id']
|
||||
|
||||
|
||||
def test_displaying_school_subscribed(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Read-family ramène les inscriptions aux date de visualisation paramétrées
|
||||
sur le référential YearSchool
|
||||
"""
|
||||
school_year = str(int(school_year) + 1)
|
||||
|
||||
# create a 7 year-old child
|
||||
url = conn + '/create-child?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'sexe': 'F',
|
||||
'firstname': 'Claris',
|
||||
'lastname': create_data['lastname'],
|
||||
'birth': {'dateBirth': '2016-09-12'},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
claris_id = str(resp.json()['data']['child_id'])
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration'
|
||||
payload = {
|
||||
'numPerson': claris_id,
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CE1',
|
||||
'dateSubscribe': school_year + '-01-01',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data']['returnMessage'] is None
|
||||
assert resp.json()['data']['subscribeSchoolBean']['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
assert resp.json()['data']['subscribeSchoolBean']['adresse'] == '101 GRANDE-RUE SAINT MICHEL'
|
||||
|
||||
# get Claris school from read-family
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()['data']
|
||||
date_start = [x['dateStartYearSchool'] for x in res if x['text'] == school_year][0]
|
||||
assert date_start[10] > datetime.datetime.now().strftime('%Y-%m-%d')
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
schools = [x['subscribeSchoolList'] for x in res['data']['childList'] if x['num'] == claris_id][0]
|
||||
assert len(schools) == 0 # school is filtered, but it is related to an hidden school year
|
||||
# field, not dateStartYearSchool, checked before : #2425
|
||||
|
||||
|
||||
def test_school_pre_registration_by_sector(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Pré-inscription de l'enfant de 7 ans dans son secteur
|
||||
"""
|
||||
# create a 7 year-old child
|
||||
url = conn + '/create-child?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'sexe': 'F',
|
||||
'firstname': 'Sego',
|
||||
'lastname': create_data['lastname'],
|
||||
'birth': {'dateBirth': '2016-05-09'},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
sego_id = str(resp.json()['data']['child_id'])
|
||||
|
||||
# assert there is a school at this address
|
||||
url = conn + '/read-schools-for-address-and-level'
|
||||
params = {
|
||||
'id_street': '2317',
|
||||
'num': '4',
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['text'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
|
||||
# assert there is a school at child address
|
||||
url = conn + '/read-schools-for-child-and-level'
|
||||
params = {
|
||||
'child_id': sego_id,
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['text'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
school_id = resp.json()['data'][0]['idSchool']
|
||||
assert school_id == '2435'
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration'
|
||||
payload = {
|
||||
'numPerson': sego_id,
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CE1',
|
||||
'dateSubscribe': school_year + '-01-01',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data']['returnMessage'] is None
|
||||
assert resp.json()['data']['subscribeSchoolBean']['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
assert resp.json()['data']['subscribeSchoolBean']['adresse'] == '101 GRANDE-RUE SAINT MICHEL'
|
||||
|
||||
# get Sego school from read-family
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()['data']
|
||||
date_start = [x['dateStartYearSchool'] for x in res if x['text'] == school_year][0]
|
||||
assert date_start[10] > datetime.datetime.now().strftime('%Y-%m-%d')
|
||||
# school is filtered, but it is related to an hidden school year
|
||||
# field, not dateStartYearSchool, see #2425
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
schools = [x['subscribeSchoolList'] for x in res['data']['childList'] if x['num'] == sego_id][0]
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
|
||||
"""
|
||||
Pré-inscription d'un enfant de 5 ans en CP avec rappprochement de fratrie pour celui de 7 ans :
|
||||
rapprochement dans le secteur de l'enfant.
|
||||
"""
|
||||
# get Sego school
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': sego_id,
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolYearList']
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['subscribeSchool']['school']['idSchool'] == school_id
|
||||
assert schools[0]['subscribeSchool']['perim']['idPerim'] == '2707'
|
||||
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['maggie_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'GS',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': school_id,
|
||||
'numPersonSibling': sego_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'CALAS MATERNELLE'
|
||||
assert resp.json()['data']['adresse'] == '47 RUE ACHILLE VIADIEU' # same sector
|
||||
|
||||
|
||||
def test_school_pre_registration_by_exemption(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Pré-inscription de l'enfant de 9 ans en dérogation :
|
||||
c'est une dérogation avec sélection du motif sur un établissement hors secteur
|
||||
"""
|
||||
# school list
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'year': school_year,
|
||||
'level': 'CM1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolInformation']['derogSchoolList']
|
||||
assert len(schools) > 1
|
||||
school_id = schools[0]['id']
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration-with-exemption'
|
||||
payload = {
|
||||
'numPerson': create_data['bart_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CM1',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idRequestSchool1': school_id,
|
||||
'derogReasonCode': exemption,
|
||||
'derogComment': 'bla',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'AMIDONNIERS ELEMENTAIRE'
|
||||
assert resp.json()['data']['adresse'] == '123 ALL DE BRIENNE'
|
||||
|
||||
"""
|
||||
Pré-inscription de l'autre enfant de 5 ans en CP
|
||||
avec rapprochement de fratrie pour celui de 9 ans :
|
||||
rapprochement hors du secteur de l'enfant.
|
||||
"""
|
||||
|
||||
# check E124 error
|
||||
# get a school that do not provide a level in its sector
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['hugo_num'],
|
||||
'year': school_year,
|
||||
'level': 'GS',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert [
|
||||
x['idSchool']
|
||||
for x in data['childSubscribeSchoolInformation']['subscribeSchoolInformation']['derogSchoolList']
|
||||
if x['text'] == 'DIEUZAIDE JEAN MATERNELLE'
|
||||
] == ['2437']
|
||||
|
||||
# try to book on a sector that do not provide the requested level
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['hugo_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CP',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': '2437',
|
||||
'numPersonSibling': create_data['bart_num'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 1
|
||||
assert resp.json()['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
assert 'E124' in resp.json()['err_desc']
|
||||
|
||||
# get Bart school
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'year': school_year,
|
||||
'level': 'CM1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolYearList']
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['subscribeSchool']['school']['idSchool'] == school_id
|
||||
assert schools[0]['subscribeSchool']['perim']['idPerim'] == '2663'
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['hugo_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'GS',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': school_id,
|
||||
'numPersonSibling': create_data['bart_num'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'AMIDONNIERS MATERNELLE'
|
||||
assert resp.json()['data']['adresse'] == '125 ALL DE BRIENNE'
|
|
@ -1,47 +1,14 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
def test_catalog_personnalise_extrasco(extrasco_subscribe_info):
|
||||
assert (
|
||||
extrasco_subscribe_info['info']['activity']['libelle1']
|
||||
== 'PUBLIK ADL ELEMENTAIRE Maourine JUIN 22/23(NE PAS UTILISER)'
|
||||
)
|
||||
assert extrasco_subscribe_info['info']['calendarGeneration']['code'] == 'NOT_REQUIRED'
|
||||
assert extrasco_subscribe_info['info']['activity']['libelle1'] == 'ADL ELEMENTAIRE Maourine Avril 2023'
|
||||
assert extrasco_subscribe_info['info']['calendarGeneration']['code'] == 'REQUIRED'
|
||||
assert extrasco_subscribe_info['info']['billingInformation'] == {
|
||||
'modeFact': 'PRESENCE',
|
||||
'quantity': None,
|
||||
'unitPrice': 11.5,
|
||||
'unitPrice': 43.0,
|
||||
}
|
||||
assert extrasco_subscribe_info['info']['activity']['blocNoteList'] == [
|
||||
{
|
||||
'note': 'Lien vers le réglement intérieur :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/03/23/flyer-sejour.pdf\r\nLien vers arrêté municipal :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/04/05/arrete-municipal.pdf',
|
||||
'numIndex': 1,
|
||||
}
|
||||
]
|
||||
assert (
|
||||
extrasco_subscribe_info['info']['agenda'][0]['details']['activity_label']
|
||||
== 'ADL ELEMENTAIRE Maourine Juin'
|
||||
)
|
||||
|
||||
|
||||
def test_catalog_personnalise_extrasco2(extrasco_subscribe_info2):
|
||||
assert (
|
||||
extrasco_subscribe_info2['info']['activity']['libelle1']
|
||||
== 'PUBLIK ADL MATERNELLE Lardenne JUIN 22/23 (NEPAS UTILISER)'
|
||||
)
|
||||
assert extrasco_subscribe_info2['info']['calendarGeneration']['code'] == 'FORBIDDEN'
|
||||
assert extrasco_subscribe_info2['info']['billingInformation'] == {
|
||||
'modeFact': 'PRESENCE',
|
||||
'quantity': None,
|
||||
'unitPrice': 11.5,
|
||||
}
|
||||
assert extrasco_subscribe_info2['info']['activity']['blocNoteList'] == [
|
||||
{
|
||||
'note': 'Lien vers le réglement intérieur :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/03/23/flyer-sejour.pdf\r\nLien vers arrêté municipal :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/04/05/arrete-municipal.pdf',
|
||||
'numIndex': 1,
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def test_direct_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
|
@ -104,11 +71,16 @@ def test_subscribe_with_conveyance(conn, create_data, extrasco_subscribe_info):
|
|||
def test_subscribe_with_recurrent_week(conn, create_data, extrasco_subscribe_info):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
assert [(x['id'], x['day']) for x in extrasco_subscribe_info['info']['recurrent_week']] == [
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('3-X', 'Mercredi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
('1-C', 'Lundi'),
|
||||
('1-B', 'Lundi'),
|
||||
('2-C', 'Mardi'),
|
||||
('2-B', 'Mardi'),
|
||||
('3-C', 'Mercredi'),
|
||||
('3-B', 'Mercredi'),
|
||||
('4-C', 'Jeudi'),
|
||||
('4-B', 'Jeudi'),
|
||||
('5-C', 'Vendredi'),
|
||||
('5-B', 'Vendredi'),
|
||||
]
|
||||
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
|
@ -119,7 +91,7 @@ def test_subscribe_with_recurrent_week(conn, create_data, extrasco_subscribe_inf
|
|||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
'recurrent_week': ['1-B', '2-C'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -139,16 +111,6 @@ def test_subscribe_with_recurrent_week(conn, create_data, extrasco_subscribe_inf
|
|||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# check quantity into basket
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
line = resp.json()['data'][0]['lignes'][0]
|
||||
assert line['prixUnit'] == 11.5
|
||||
assert line['qte'] > 0
|
||||
assert line['montant'] == line['prixUnit'] * line['qte']
|
||||
|
||||
# remove subscription
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
|
@ -173,23 +135,7 @@ def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
|||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_perisco_bookings():
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return [
|
||||
item
|
||||
for item in resp.json()['data']
|
||||
if item['details']['activity_id'] == extrasco_subscribe_info['activity']['id']
|
||||
]
|
||||
|
||||
# subscribe without providing calendar
|
||||
# subscribe witout providing calandar
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
|
@ -206,7 +152,6 @@ def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
|||
|
||||
# no booking
|
||||
assert not any(x['prefill'] for x in get_bookings())
|
||||
assert not any(x['prefill'] for x in get_perisco_bookings())
|
||||
|
||||
# book using info calendar gabarit (booking registered from w.c.s. form)
|
||||
assert len(extrasco_subscribe_info['info']['agenda']) > 0
|
||||
|
@ -228,17 +173,6 @@ def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
|||
|
||||
# there is now 2 bookings
|
||||
assert len([x['prefill'] for x in get_bookings() if x['prefill'] is True]) == 2
|
||||
perisco_bookings = get_perisco_bookings()
|
||||
assert len([x['prefill'] for x in perisco_bookings if x['prefill'] is True]) == 2
|
||||
assert perisco_bookings[0]['details']['activity_label'] == 'ADL ELEMENTAIRE Maourine Juin'
|
||||
|
||||
# check quantity into basket
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
line = resp.json()['data'][0]['lignes'][0]
|
||||
assert (line['prixUnit'], line['qte'], line['montant']) == (11.5, 0.0, 0.0)
|
||||
|
||||
# unbook slots
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
|
@ -262,61 +196,3 @@ def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
|||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_daily_capacity(conn, create_data2, extrasco_subscribe_info3):
|
||||
assert extrasco_subscribe_info3['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def subscribe(child):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['%s_num' % child],
|
||||
'activity_id': extrasco_subscribe_info3['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info3['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info3['place']['id'],
|
||||
'start_date': extrasco_subscribe_info3['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info3['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']['basket']['id']
|
||||
|
||||
def book(child, slot):
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['%s_num' % child],
|
||||
'activity_id': extrasco_subscribe_info3['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info3['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info3['unit']['dateEnd'][:10],
|
||||
'booking_list': [slot],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
# subscribe all family childs
|
||||
basket_id = subscribe('bart')
|
||||
for child in 'lisa', 'maggie', 'hugo':
|
||||
assert subscribe(child) == basket_id
|
||||
|
||||
# book all childs on the same day
|
||||
assert len(extrasco_subscribe_info3['info']['agenda']) > 0
|
||||
assert not any(x['prefill'] for x in extrasco_subscribe_info3['info']['agenda'])
|
||||
slots = [x['id'] for x in extrasco_subscribe_info3['info']['agenda'] if x['disabled'] is False]
|
||||
for child in 'bart', 'lisa', 'maggie':
|
||||
resp = book(child, slots[-1])
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True]
|
||||
resp = book('hugo', slots[-1])
|
||||
assert resp.json()['err'] == 1
|
||||
assert resp.json()['err_desc'] == 0
|
||||
|
||||
# # remove subscriptions
|
||||
# url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
# payload = {'basket_id': basket_id}
|
||||
# resp = requests.post(url, json=payload)
|
||||
# resp.raise_for_status()
|
||||
# assert resp.json()['err'] == 0
|
|
@ -1,369 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
|
||||
def test_perisco(perisco_subscribe_info):
|
||||
assert perisco_subscribe_info['info']['activity']['libelle1'] == 'TEST TEMPS DU MIDI 22/23'
|
||||
|
||||
|
||||
def test_perisco_adulte(perisco_subscribe_adulte_info):
|
||||
assert perisco_subscribe_adulte_info['info']['activity']['libelle1'] == 'TEST RESTAURATION ADULTE 22/23'
|
||||
|
||||
|
||||
def test_perisco_agenda(conn, create_data, perisco_subscribe_info):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# find first available booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
booking = None
|
||||
for booking in resp.json()['data']:
|
||||
if booking['disabled'] is False:
|
||||
break
|
||||
else:
|
||||
raise Exception('no booking available')
|
||||
assert booking['details']['activity_id'] == perisco_subscribe_info['activity']['id']
|
||||
assert booking['details']['activity_label'] == 'Temps du midi'
|
||||
assert booking['prefill'] is False
|
||||
|
||||
# book activity
|
||||
url = conn + '/update-child-agenda?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [booking['id']],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {
|
||||
'updated': True,
|
||||
'count': 1,
|
||||
'changes': [
|
||||
{
|
||||
'booked': True,
|
||||
'activity_id': booking['details']['activity_id'],
|
||||
'activity_label': 'Temps du midi',
|
||||
'day': booking['details']['day_str'],
|
||||
}
|
||||
],
|
||||
'err': 0,
|
||||
}
|
||||
|
||||
# check booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_agenda_adulte(conn, create_data2, perisco_subscribe_adulte_info):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['rl1_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_adulte_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_adulte_info['place']['id'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# find first available booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
booking = None
|
||||
for booking in resp.json()['data']:
|
||||
if booking['disabled'] is False:
|
||||
break
|
||||
else:
|
||||
raise Exception('no booking available')
|
||||
assert booking['details']['activity_id'] == perisco_subscribe_adulte_info['activity']['id']
|
||||
assert booking['details']['activity_label'] == 'RESTAURATION ADULTE'
|
||||
assert booking['prefill'] is False
|
||||
|
||||
# book activity
|
||||
url = conn + '/update-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [booking['id']],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {
|
||||
'updated': True,
|
||||
'count': 1,
|
||||
'changes': [
|
||||
{
|
||||
'booked': True,
|
||||
'activity_id': booking['details']['activity_id'],
|
||||
'activity_label': 'RESTAURATION ADULTE',
|
||||
'day': booking['details']['day_str'],
|
||||
}
|
||||
],
|
||||
'err': 0,
|
||||
}
|
||||
|
||||
# check booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_recurrent_week(conn, create_data, perisco_subscribe_info, reference_year):
    """Recurrent-week template flow for a child: subscribe, apply a weekly template, check bookings appear."""
    info = perisco_subscribe_info
    name_id = create_data['name_id']
    child_id = create_data['maggie_num']
    start_date = info['unit']['dateStart'][:10]
    end_date = info['unit']['dateEnd'][:10]
    school_year = f'{reference_year}-{reference_year + 1}'

    unlink(conn, name_id)
    link(conn, create_data)

    # no subscribed activity yet
    activity_list_url = f'{conn}/read-subscribe-activity-list?NameID={name_id}'
    activity_list_params = {'person_id': child_id, 'nature': 'PERISCO', 'school_year': school_year}
    response = requests.get(activity_list_url, params=activity_list_params)
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert len(response.json()['data']) == 0

    # subscription
    response = requests.post(
        f'{conn}/add-person-subscription?NameID={name_id}',
        json={
            'person_id': child_id,
            'activity_id': info['activity']['id'],
            'unit_id': info['unit']['id'],
            'place_id': info['place']['id'],
            'start_date': start_date,
            'end_date': end_date,
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0

    # the activity now appears in the subscribed list
    response = requests.get(activity_list_url, params=activity_list_params)
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert len(response.json()['data']) == 1
    assert response.json()['data'][0]['id'] == info['activity']['id']
    assert [(x['text'], x['libelle'], x['libelle2']) for x in response.json()['data']] == [
        ('Temps du midi', 'TEST TEMPS DU MIDI 22/23', 'Temps du midi'),
    ]

    # fetch the recurrent-week template (days offered for this activity)
    response = requests.get(
        f'{conn}/get-recurrent-week?NameID={name_id}',
        params={
            'person_id': child_id,
            'activity_id': info['activity']['id'],
            'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert [(x['id'], x['day']) for x in response.json()['data']] == [
        ('1-X', 'Lundi'),
        ('2-X', 'Mardi'),
        ('4-X', 'Jeudi'),
        ('5-X', 'Vendredi'),
    ]

    # no booking yet
    agenda_url = f'{conn}/read-child-agenda?NameID={name_id}'
    agenda_params = {'child_id': child_id, 'start_date': start_date, 'end_date': end_date}
    response = requests.get(agenda_url, params=agenda_params)
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert not any(x['prefill'] for x in response.json()['data'])

    # apply the recurrent-week template (Monday and Tuesday)
    response = requests.post(
        f'{conn}/update-recurrent-week?NameID={name_id}',
        json={
            'person_id': child_id,
            'activity_id': info['activity']['id'],
            'start_date': start_date,
            'end_date': end_date,
            'recurrent_week': ['1-X', '2-X'],
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert response.json()['data'] == 'ok'

    # the template generated some bookings
    response = requests.get(agenda_url, params=agenda_params)
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert any(x['prefill'] for x in response.json()['data'])
|
||||
|
||||
|
||||
def test_perisco_recurrent_week_adulte(conn, create_data2, perisco_subscribe_adulte_info, reference_year):
    """Recurrent-week template flow for an adult (RL2): subscribe, apply a weekly template, check bookings appear."""
    info = perisco_subscribe_adulte_info
    name_id = create_data2['name_id']
    adult_id = create_data2['rl2_num']
    start_date = info['unit']['dateStart'][:10]
    end_date = info['unit']['dateEnd'][:10]
    school_year = f'{reference_year}-{reference_year + 1}'

    unlink(conn, name_id)
    link(conn, create_data2)

    # no subscribed activity yet
    activity_list_url = f'{conn}/read-subscribe-activity-list?NameID={name_id}'
    activity_list_params = {'person_id': adult_id, 'nature': 'PERISCO', 'school_year': school_year}
    response = requests.get(activity_list_url, params=activity_list_params)
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert len(response.json()['data']) == 0

    # subscription
    response = requests.post(
        f'{conn}/add-person-subscription?NameID={name_id}',
        json={
            'person_id': adult_id,
            'activity_id': info['activity']['id'],
            'unit_id': info['unit']['id'],
            'place_id': info['place']['id'],
            'start_date': start_date,
            'end_date': end_date,
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0

    # the activity now appears in the subscribed list
    response = requests.get(activity_list_url, params=activity_list_params)
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert len(response.json()['data']) == 1
    assert response.json()['data'][0]['id'] == info['activity']['id']
    assert [(x['text'], x['libelle'], x['libelle2']) for x in response.json()['data']] == [
        ('RESTAURATION ADULTE', 'TEST RESTAURATION ADULTE 22/23', 'RESTAURATION ADULTE')
    ]

    # fetch the recurrent-week template (days offered for this activity)
    response = requests.get(
        f'{conn}/get-recurrent-week?NameID={name_id}',
        params={
            'person_id': adult_id,
            'activity_id': info['activity']['id'],
            'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert [(x['id'], x['day']) for x in response.json()['data']] == [
        ('1-X', 'Lundi'),
        ('2-X', 'Mardi'),
        ('3-X', 'Mercredi'),
        ('4-X', 'Jeudi'),
        ('5-X', 'Vendredi'),
    ]

    # no booking yet
    agenda_url = f'{conn}/read-child-agenda?NameID={name_id}'
    agenda_params = {'child_id': adult_id, 'start_date': start_date, 'end_date': end_date}
    response = requests.get(agenda_url, params=agenda_params)
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert not any(x['prefill'] for x in response.json()['data'])

    # apply the recurrent-week template (Monday and Tuesday)
    response = requests.post(
        f'{conn}/update-recurrent-week?NameID={name_id}',
        json={
            'person_id': adult_id,
            'activity_id': info['activity']['id'],
            'start_date': start_date,
            'end_date': end_date,
            'recurrent_week': ['1-X', '2-X'],
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert response.json()['data'] == 'ok'

    # the template generated some bookings
    response = requests.get(agenda_url, params=agenda_params)
    response.raise_for_status()
    assert response.json()['err'] == 0
    assert any(x['prefill'] for x in response.json()['data'])
|
|
@ -0,0 +1,205 @@
|
|||
import requests
|
||||
|
||||
|
||||
def test_basket_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):
    """Exercise the EXTRASCO basket life cycle end to end.

    Scenario: subscribe two children into a shopping basket, delete a basket,
    delete a single (generic) basket line, add agenda bookings, then validate
    the basket and check the resulting subscriptions.
    """
    assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # Current baskets of the linked family (empty list when none).
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # Put a subscription to the (generic) EXTRASCO unit into the basket.
        # Returns the raw response: callers also assert on error cases.
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'unit_id': extrasco_subscribe_info['unit']['id'],
            'place_id': extrasco_subscribe_info['place']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    def subscriptions(person_id):
        # Activities the person is subscribed to for the reference school year.
        url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'nature': 'EXTRASCO',
            'school_year': '%s-%s' % (reference_year, reference_year + 1),
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def get_bookings(person_id):
        # Agenda slots for the person on the subscribed activity.
        url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    # no subscription
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 105
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 3

    assert len(subscriptions(create_data['bart_num'])) == 1
    assert subscriptions(create_data['maggie_num']) == []

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 105
    assert data[0]['text'] == 'ENFANCE LOISIRS ET PE'
    assert len(data[0]['lignes']) == 3
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1

    # get 3 idIns because we subscribe a generic unit
    assert len({x['idIns'] for x in data[0]['lignes']}) == 3
    basket_id = data[0]['id']

    # cannot subscribe Bart twice
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 1
    assert 'E1019' in resp.json()['err_desc']
    assert len(get_baskets()) == 1

    # delete basket
    # should be called by the user or by a cron job
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'
    assert get_baskets() == []
    assert subscriptions(create_data['bart_num']) == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len(subscriptions(create_data['bart_num'])) == 1

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # delete (generic) basket line for Bart
    data = get_baskets()
    assert len(data) == 1
    assert len(data[0]['lignes']) == 6
    basket_id = data[0]['id']
    # idIns for the generic unit
    line_id = [
        y['id']
        for x in data
        for y in x['lignes']
        if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
        if y['inscription']['idUnit'] == extrasco_subscribe_info['unit']['id']
    ][0]
    url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
    payload = {
        'basket_id': basket_id,
        'line_id': line_id,
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['codeRegie'] == 105
    # deleting the generic line removed all 3 of Bart's idIns from the basket
    assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
    assert len({x['idIns'] for x in data['lignes']}) == 3
    data = get_baskets()
    assert len(data) == 1
    assert len(get_baskets()) == 1
    assert len(data[0]['lignes']) == 3
    assert subscriptions(create_data['bart_num']) == []
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # re-subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    assert len(subscriptions(create_data['bart_num'])) == 1

    # add bookings to Bart
    slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert [x['booked'] for x in resp.json()['changes']] == [True, True]
    assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2

    # add bookings to Maggie
    # slot ids look like '<person>:<...>': rewrite Bart's slot ids for Maggie
    slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['maggie_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert [x['booked'] for x in resp.json()['changes']] == [True, True]
    assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2

    # validate basket
    url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data['idInsLst']) == 6
    assert len(data['factureLst']) == 0  # No invoice #2187
    assert get_baskets() == []

    assert len(subscriptions(create_data['bart_num'])) == 1
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # call cancelInvoiceAndDeleteSubscribeList to remove subscriptions
|
|
@ -1,261 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import get_subscription_info, link, unlink
|
||||
|
||||
# LOISIR is like EXTRASCO (most tests are redundant) but:
# * there is no calendar (days) to provide.
# * there is a general catalog to display
|
||||
|
||||
|
||||
def test_catalog_general_loisirs(conn, update_data):
    """Check the general LOISIRS catalog: expected activity labels, facet criterias and notes."""
    unlink(conn, update_data['name_id'])
    link(conn, update_data)
    url = conn + '/read-activity-list'
    params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}

    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    # the catalog must contain these known test activities
    labels = [x['text'] for x in resp.json()['data']]
    assert (
        'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS'
        in labels
    )
    assert (
        'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 14h/16h30 - 10/15Ans, LA RAMEE'
        in labels
    )
    assert (
        'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 15h30/17h - 8/15Ans, ARGOULETS'
        in labels
    )
    assert 'Promenade forêt enchantée, TEST promenade forêt enchantée, TERRITOIRE OUEST' in labels
    assert 'Vitrail Fusing 1/2 Je Adultes, Inscription annuelle, Centre Culturel ALBAN MINVILLE' in labels

    # spot-check the criterias facets and bloc-note entries on two known activities
    for item in resp.json()['data']:
        if (
            item['text']
            == 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS'
        ):
            assert item['criterias'] == {
                'service': {'text': 'Service', 'data': {'sports': 'Sports'}, 'order': ['sports']},
                'nature': {
                    'text': "Nature de l'activité",
                    'data': {'1': 'Activités Régulières'},
                    'order': ['1'],
                },
                'type': {
                    'text': "Type de l'activité",
                    'data': {'activites-aquatiques': 'Activités Aquatiques'},
                    'order': ['activites-aquatiques'],
                },
                'public': {
                    'text': 'Public',
                    'data': {'1': 'Enfant (3-11 ans)', '2': 'Ado (12-17 ans)'},
                    'order': ['1', '2'],
                },
                'day': {'text': 'Jours', 'data': {'3': 'Mercredi'}, 'order': ['3']},
                'place': {'text': 'Lieu', 'data': {'A10053179757': 'ARGOULETS'}, 'order': ['A10053179757']},
            }
            assert item['activity']['activityPortail']['blocNoteList'] == [
                {
                    'note': "Activité ayant lieu le Mercredi, merci de choisir votre tranche horraire en fonction de l'âge de votre enfant.",
                    'numIndex': 1,
                }
            ]
        if item['text'] == 'Promenade forêt enchantée, TEST promenade forêt enchantée, TERRITOIRE OUEST':
            assert item['criterias'] == {
                'service': {'text': 'Service', 'data': {'sports': 'Sports'}, 'order': ['sports']},
                'nature': {
                    'text': "Nature de l'activité",
                    'data': {'1': 'Activités Régulières'},
                    'order': ['1'],
                },
                'type': {
                    'text': "Type de l'activité",
                    'data': {'activite-pedestre': 'Activité Pédestre'},
                    'order': ['activite-pedestre'],
                },
                'public': {'text': 'Public', 'data': {'5': 'Sénior (60 ans et plus)'}, 'order': ['5']},
                'day': {
                    'text': 'Jours',
                    'data': {'1': 'Lundi', '2': 'Mardi', '3': 'Mercredi', '4': 'Jeudi', '5': 'Vendredi'},
                    'order': ['1', '2', '3', '4', '5'],
                },
                'place': {
                    'text': 'Lieu',
                    'data': {'A10056517597': 'TERRITOIRE OUEST'},
                    'order': ['A10056517597'],
                },
            }
            assert item['activity']['activityPortail']['blocNoteList'] == [
                {'note': 'Activité de promenade en forêt.', 'numIndex': 1}
            ]
|
||||
|
||||
|
||||
def test_catalog_personnalise_loisirs(loisirs_subscribe_info):
|
||||
assert (
|
||||
loisirs_subscribe_info['info']['activity']['libelle1']
|
||||
== 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES'
|
||||
)
|
||||
assert loisirs_subscribe_info['info']['calendarGeneration']['code'] == 'REQUIRED'
|
||||
assert [(x['id'], x['day']) for x in loisirs_subscribe_info['info']['recurrent_week']] == []
|
||||
assert loisirs_subscribe_info['info']['billingInformation'] == {
|
||||
'modeFact': 'FORFAIT',
|
||||
'quantity': 1.0,
|
||||
'unitPrice': 88.5,
|
||||
}
|
||||
|
||||
|
||||
def test_catalog_personnalise_loisirs_not_allowed(conn, create_data, reference_year):
    """An adult (RL1) must not be able to subscribe to a child-only activity.

    get_subscription_info raises when the person is not eligible; the original
    try/except/`assert False` pattern is replaced by pytest.raises, which both
    reads better and keeps working under `python -O` (where bare asserts are
    stripped).
    """
    unlink(conn, create_data['name_id'])
    link(conn, create_data)
    with pytest.raises(Exception):
        get_subscription_info(
            'LOISIRS',
            'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
            'MERCREDI - 15h30/17h - 8/15Ans',
            'ARGOULETS',
            conn,
            create_data['name_id'],
            create_data['rl1_num'],
            reference_year,
        )
|
||||
|
||||
|
||||
def test_direct_subscribe(conn, create_data, loisirs_subscribe_info, reference_year):
    """Subscribe a child directly to a LOISIRS activity (no basket involved)."""
    assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
    name_id = create_data['name_id']
    unlink(conn, name_id)
    link(conn, create_data)

    response = requests.post(
        f'{conn}/add-person-subscription?NameID={name_id}',
        json={
            'person_id': create_data['hugo_num'],
            'activity_id': loisirs_subscribe_info['activity']['id'],
            'unit_id': loisirs_subscribe_info['unit']['id'],
            'place_id': loisirs_subscribe_info['place']['id'],
            'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
            'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0

    # no idIns provided to remove subscription later
    assert response.json()['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_direct_subscribe_out_town(conn, create_data2, loisirs_subscribe_info2, reference_year):
    """Subscribe a child living out of town directly to a LOISIRS activity."""
    assert loisirs_subscribe_info2['info']['controlResult']['controlOK'] is True
    name_id = create_data2['name_id']
    unlink(conn, name_id)
    link(conn, create_data2)

    response = requests.post(
        f'{conn}/add-person-subscription?NameID={name_id}',
        json={
            'person_id': create_data2['hugo_num'],
            'activity_id': loisirs_subscribe_info2['activity']['id'],
            'unit_id': loisirs_subscribe_info2['unit']['id'],
            'place_id': loisirs_subscribe_info2['place']['id'],
            'start_date': loisirs_subscribe_info2['unit']['dateStart'][:10],
            'end_date': loisirs_subscribe_info2['unit']['dateEnd'][:10],
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0

    # no idIns provided to remove subscription later
    assert response.json()['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_subscribe_to_basket(conn, create_data, loisirs_subscribe_info, reference_year):
    """Subscribe a child through a basket, then delete the basket to clean up."""
    assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
    name_id = create_data['name_id']
    unlink(conn, name_id)
    link(conn, create_data)

    # put the subscription into a basket
    response = requests.post(
        f'{conn}/add-person-basket-subscription?NameID={name_id}',
        json={
            'person_id': create_data['bart_num'],
            'activity_id': loisirs_subscribe_info['activity']['id'],
            'unit_id': loisirs_subscribe_info['unit']['id'],
            'place_id': loisirs_subscribe_info['place']['id'],
            'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
            'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
        },
    )
    response.raise_for_status()
    assert response.json()['err'] == 0
    basket_id = response.json()['data']['basket']['id']

    # remove subscription by deleting the whole basket
    response = requests.post(
        f'{conn}/delete-basket?NameID={name_id}',
        json={'basket_id': basket_id},
    )
    response.raise_for_status()
    assert response.json()['err'] == 0
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
def test_global_capacity(conn, create_data2, loisirs_subscribe_info3, reference_year):
    """WIP: fill an activity's global capacity, then check the catalog reflects it.

    Marked xfail(run=False): the expected capacity error message and the final
    catalog check are not implemented yet. A leftover `import pdb;
    pdb.set_trace()` debugger call was removed from the catalog loop — committed
    breakpoints hang any runner that does execute the test.
    """
    assert loisirs_subscribe_info3['info']['controlResult']['controlOK'] is True
    unlink(conn, create_data2['name_id'])
    link(conn, create_data2)

    # subscribe Bart
    url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
    # url = conn + '/add-person-basket-subscription?NameID=%s' % create_data2['name_id']
    payload = {
        'person_id': create_data2['bart_num'],
        'activity_id': loisirs_subscribe_info3['activity']['id'],
        'unit_id': loisirs_subscribe_info3['unit']['id'],
        'place_id': loisirs_subscribe_info3['place']['id'],
        'start_date': loisirs_subscribe_info3['unit']['dateStart'][:10],
        'end_date': loisirs_subscribe_info3['unit']['dateEnd'][:10],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    # basket_id = resp.json()['data']['basket']['id']

    # subscribe Lisa
    payload['person_id'] = create_data2['lisa_num']
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0

    # subscribe Maggie
    payload['person_id'] = create_data2['maggie_num']
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0

    # can't subscribe Hugo — presumably the global capacity is reached (TODO confirm)
    payload['person_id'] = create_data2['hugo_num']
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 1
    assert resp.json()['err_desc'] == ''

    # check capacity on main catalog
    url = conn + '/read-activity-list'
    params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    for item in resp.json()['data']:
        if item['activity']['libelle'] == 'PUBLIK Vitrail Fusing 1/2 Je Adultes 2022/2023 - Mardi 14h-1':
            # TODO: assert what the catalog exposes for a full activity
            # (a pdb.set_trace() debugging leftover was removed here).
            pass

    # # remove subscriptions
    # url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    # payload = {'basket_id': basket_id}
    # resp = requests.post(url, json=payload)
    # resp.raise_for_status()
    # assert resp.json()['err'] == 0
|
|
@ -0,0 +1,109 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import diff, link, unlink
|
||||
|
||||
|
||||
def test_direct_debit_order(conn, create_data):
    """Create a direct-debit order for RL1, then read it back and diff it against the reference JSON."""
    name_id = create_data['name_id']
    unlink(conn, name_id)
    link(conn, create_data)

    # register the direct-debit order (bank account + mandate details)
    response = requests.post(
        f'{conn}/add-rl1-direct-debit-order?NameID={name_id}',
        json={
            'codeRegie': '102',
            'bank/bankBIC': 'BDFEFR2T',
            'bank/bankIBAN': 'FR7630001007941234567890185',
            'bank/bankRUM': 'xxx',
            'bank/dateStart': '2023-01-01',
            'bank/bankAddress': '75049 PARIS cedex 01',
            'bank/civility': 'x',
            'bank/lastName': 'Ewing',
            'bank/firstName': 'John Ross',
        },
    )
    response.raise_for_status()
    assert response.json()['data'] == 'ok'

    # read it back and compare with the stored reference payload
    response = requests.get(
        f'{conn}/get-rl1-direct-debit-order?NameID={name_id}',
        params={
            'codeRegie': '102',
            'dateRef': '2023-01-01',
        },
    )
    response.raise_for_status()
    result = response.json()
    # numPerson differs per run: neutralize it before diffing
    result['data']['numPerson'] = 'N/A'
    assert diff(result['data'], 'test_get_rl1_direct_debit_order.json')
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_basket_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 3
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS ET PE'
|
||||
assert len(data[0]['lignes']) == 3
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 3
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# validate basket
|
||||
url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data['idInsLst']) == 6
|
||||
assert len(data['factureLst']) == 0
|
||||
assert get_baskets() == []
|
||||
|
||||
# to continue :
|
||||
# cancelInvoiceAndDeleteSubscribeList
|
||||
# payInvoice
|
|
@ -1,557 +0,0 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
def test_basket_subscribe_extrasco(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1 # 3 sur Larden
|
||||
|
||||
subs = subscriptions(create_data['bart_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 1
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS'
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1 # 3 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
subs = subscriptions(create_data['maggie_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 1
|
||||
|
||||
# delete (generic) basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 2 # 6 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
# line for the generic unit for Bart
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
if y['inscription']['idUnit'] == extrasco_subscribe_info['unit']['id']
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 1 # 3 sur Larden
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# re-subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_basket_subscribe_extrasco2(conn, create_data, extrasco_subscribe_info2, reference_year):
|
||||
"""Subscribing to a generic unit"""
|
||||
assert extrasco_subscribe_info2['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info2['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info2['place']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1 # 3 expected
|
||||
|
||||
subs = subscriptions(create_data['bart_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 2
|
||||
assert [x['libelle'] for x in subs[0]['subscribesUnit']] == [
|
||||
'PUBLIK ADL MATERNELLE Lardenne JUIN 22/23 (NEPAS UTILISER)',
|
||||
'PUBLIK ADL MATER JOURNEE AVEC REPAS',
|
||||
]
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS'
|
||||
assert len(data[0]['lignes']) == 1 # 3 expected
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# we should get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1 # 3 expected
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# delete (generic) basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 2 # 6 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
# line for the generic unit for Bart
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
if y['inscription']['idUnit'] == extrasco_subscribe_info2['unit']['id']
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 1 # 3 sur Larden
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# re-subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info2['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# delete basket
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
|
||||
def test_basket_subscribe_loisirs(conn, create_data, loisirs_subscribe_info, reference_year):
|
||||
assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': loisirs_subscribe_info['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info['place']['id'],
|
||||
'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'LOISIRS',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return [
|
||||
x
|
||||
for x in resp.json()['data']
|
||||
if x['libelle'] == 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES'
|
||||
]
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 109
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1
|
||||
subs = subscriptions(create_data['bart_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 2
|
||||
assert [x['libelle'] for x in subs[0]['subscribesUnit']] == [
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
|
||||
'MERCREDI - 15h30/17h - 8/15Ans',
|
||||
]
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 109
|
||||
assert data[0]['text'] == 'SPORT'
|
||||
assert len(data[0]['lignes']) == 1
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1
|
||||
assert data[0]['lignes'][0]['montant'] == 88.5
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
subs = subscriptions(create_data['maggie_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 2
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['id'] == basket_id
|
||||
assert data[0]['codeRegie'] == 109
|
||||
assert data[0]['text'] == 'SPORT'
|
||||
assert len(data[0]['lignes']) == 2
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 2
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 2
|
||||
assert all(x['montant'] == 88.5 for x in data[0]['lignes'])
|
||||
|
||||
# delete basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 2
|
||||
basket_id = data[0]['id']
|
||||
# line for Bart
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 109
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 1
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 1
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# delete basket
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
|
@ -1,346 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import diff, link, unlink
|
||||
|
||||
|
||||
def test_direct_debit_order(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'codeRegie': '102',
|
||||
'bank/bankBIC': 'BDFEFR2T',
|
||||
'bank/bankIBAN': 'FR7630001007941234567890185',
|
||||
'bank/bankRUM': 'xxx',
|
||||
'bank/dateStart': '2023-01-01',
|
||||
'bank/bankAddress': '75049 PARIS cedex 01',
|
||||
'bank/civility': 'x',
|
||||
'bank/lastName': 'Ewing',
|
||||
'bank/firstName': 'John Ross',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
url = conn + '/get-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'codeRegie': '102',
|
||||
'dateRef': '2023-01-01',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
res['data']['numPerson'] = 'N/A'
|
||||
assert diff(res['data'], 'test_get_rl1_direct_debit_order.json')
|
||||
|
||||
|
||||
def test_pay_invoice_loisirs(conn, create_data, loisirs_subscribe_info, reference_year):
|
||||
assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': loisirs_subscribe_info['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info['place']['id'],
|
||||
'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 109
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 109
|
||||
assert data[0]['text'] == 'SPORT'
|
||||
assert len(data[0]['lignes']) == 2
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 2
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 2
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# validate basket de generate an invoice
|
||||
url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data['idInsLst']) == 2
|
||||
assert len(data['factureLst']) == 1
|
||||
assert len(data['factureLst'][0]['lineInvoiceList']) == 2
|
||||
assert data['factureLst'][0]['regie']['code'] == 109
|
||||
invoice_num = data['factureLst'][0]['numInvoice']
|
||||
invoice_id = data['factureLst'][0]['idInvoice']
|
||||
assert get_baskets() == []
|
||||
|
||||
# get invoices paid
|
||||
url = conn + '/regie/109/invoices/history?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'data': [], 'err': 0}
|
||||
|
||||
# get invoices to be paid
|
||||
url = conn + '/regie/109/invoices?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data) == 1
|
||||
assert data[0]['amount'] == '177' # ou juste > 0 ?
|
||||
assert data[0]['online_payment'] is True
|
||||
assert data[0]['paid'] is False
|
||||
assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
|
||||
assert data[0]['maelis_item']['idInvoice'] == invoice_id
|
||||
assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
||||
|
||||
# payInvoice
|
||||
url = conn + '/regie/109/invoice/%s-%s/pay/' % (create_data['family_id'], invoice_num)
|
||||
payload = {
|
||||
'transaction_date': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
|
||||
'transaction_id': 'xxx',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
# get invoices to be paid
|
||||
url = conn + '/regie/109/invoices?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'has_invoice_for_payment': True, 'data': [], 'err': 0}
|
||||
|
||||
# get invoices paid
|
||||
url = conn + '/regie/109/invoices/history?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data) == 1
|
||||
assert data[0]['amount'] == '0'
|
||||
assert data[0]['total_amount'] == '177' # ou juste > 0 ?
|
||||
assert data[0]['online_payment'] is False
|
||||
assert data[0]['paid'] is True
|
||||
assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
|
||||
assert data[0]['maelis_item']['idInvoice'] == invoice_id
|
||||
assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
||||
|
||||
|
||||
def test_payinvoice_extrasco(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1
|
||||
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert len(data[0]['lignes']) == 1
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) > 0
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) > 0
|
||||
|
||||
# validate basket
|
||||
url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data['idInsLst']) == 2
|
||||
assert len(data['factureLst']) == 1
|
||||
assert get_baskets() == []
|
||||
assert len(data['factureLst'][0]['lineInvoiceList']) == 2
|
||||
assert data['factureLst'][0]['regie']['code'] == 105
|
||||
invoice_num = data['factureLst'][0]['numInvoice']
|
||||
invoice_id = data['factureLst'][0]['idInvoice']
|
||||
|
||||
# get invoices paid
|
||||
url = conn + '/regie/105/invoices/history?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'data': [], 'err': 0}
|
||||
|
||||
# get invoices to be paid
|
||||
url = conn + '/regie/105/invoices?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data) == 1
|
||||
assert int(data[0]['amount']) > 0
|
||||
assert data[0]['online_payment'] is True
|
||||
assert data[0]['paid'] is False
|
||||
assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
|
||||
assert data[0]['maelis_item']['idInvoice'] == invoice_id
|
||||
assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
||||
|
||||
# payInvoice
|
||||
url = conn + '/regie/105/invoice/%s-%s/pay/' % (create_data['family_id'], invoice_num)
|
||||
payload = {
|
||||
'transaction_date': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
|
||||
'transaction_id': 'xxx',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
# get invoices to be paid
|
||||
url = conn + '/regie/105/invoices?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'has_invoice_for_payment': True, 'data': [], 'err': 0}
|
||||
|
||||
# get invoices history
|
||||
url = conn + '/regie/105/invoices/history?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data) == 1
|
||||
assert data[0]['amount'] == '0'
|
||||
assert int(data[0]['total_amount']) > 0
|
||||
assert data[0]['online_payment'] is False
|
||||
assert data[0]['paid'] is True
|
||||
assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
|
||||
assert data[0]['maelis_item']['idInvoice'] == invoice_id
|
||||
assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
|
@ -2,9 +2,9 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--url', help='Url of a passerelle Vivaticket connector instance')
|
||||
parser.addoption("--url", help="Url of a passerelle Vivaticket connector instance")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption('--url')
|
||||
return request.config.getoption("--url")
|
||||
|
|
|
@ -6,7 +6,7 @@ import requests
|
|||
|
||||
|
||||
def call_generic(conn, endpoint):
|
||||
print('%s \n' % endpoint)
|
||||
print("%s \n" % endpoint)
|
||||
url = conn + '/%s' % endpoint
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -50,7 +50,7 @@ def test_book_event(conn):
|
|||
themes = call_generic(conn, 'themes')
|
||||
random.shuffle(themes)
|
||||
payload['theme'] = themes[0]['id']
|
||||
print('Creating booking with the following payload:\n%s' % payload)
|
||||
print("Creating booking with the following payload:\n%s" % payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
#!/bin/sh -ue
|
||||
|
||||
test -d wcs || git clone https://git.entrouvert.org/entrouvert/wcs.git
|
||||
(cd wcs && git pull)
|
|
@ -2,8 +2,8 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == '__main__':
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'passerelle.settings')
|
||||
if __name__ == "__main__":
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passerelle.settings")
|
||||
|
||||
from django.core.management import execute_from_command_line
|
||||
|
||||
|
|
|
@ -102,7 +102,6 @@ class AddressResource(BaseResource):
|
|||
@endpoint(
|
||||
name='sectors',
|
||||
description=_('List related Sectorizations'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Sector Identifier (slug)')},
|
||||
'q': {'description': _('Filter by Sector Title or Identifier')},
|
||||
|
|
|
@ -48,7 +48,7 @@ class ActesWeb(BaseResource):
|
|||
def basepath(self):
|
||||
return os.path.join(default_storage.path('actesweb'), self.slug)
|
||||
|
||||
@endpoint(methods=['post'], description=_('Create demand'))
|
||||
@endpoint(perm='can_access', methods=['post'], description=_('Create demand'))
|
||||
def create(self, request, *args, **kwargs):
|
||||
try:
|
||||
payload = json.loads(request.body)
|
||||
|
|
|
@ -1,77 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-07-07 10:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AdullactPastell',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'basic_auth_username',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication username'
|
||||
),
|
||||
),
|
||||
(
|
||||
'basic_auth_password',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication password'
|
||||
),
|
||||
),
|
||||
(
|
||||
'client_certificate',
|
||||
models.FileField(
|
||||
blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
|
||||
),
|
||||
),
|
||||
(
|
||||
'trusted_certificate_authorities',
|
||||
models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'),
|
||||
),
|
||||
(
|
||||
'verify_cert',
|
||||
models.BooleanField(blank=True, default=True, verbose_name='TLS verify certificates'),
|
||||
),
|
||||
(
|
||||
'http_proxy',
|
||||
models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
|
||||
),
|
||||
(
|
||||
'api_base_url',
|
||||
models.URLField(
|
||||
help_text='Example: https://pastell.example.com/api/v2/',
|
||||
max_length=128,
|
||||
verbose_name='API base URL',
|
||||
),
|
||||
),
|
||||
('token', models.CharField(blank=True, max_length=128, verbose_name='API token')),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_adullact_pastell_adullactpastell_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Adullact Pastell',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -1,265 +0,0 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import base64
|
||||
from urllib import parse as urlparse
|
||||
|
||||
import requests
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from django.http import HttpResponse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource, HTTPResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
FILE_OBJECT_PROPERTIES = {
|
||||
'title': _('File object'),
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename'),
|
||||
},
|
||||
'content': {
|
||||
'type': 'string',
|
||||
'description': _('Content'),
|
||||
},
|
||||
'content_type': {
|
||||
'type': 'string',
|
||||
'description': _('Content type'),
|
||||
},
|
||||
},
|
||||
'required': ['filename', 'content'],
|
||||
}
|
||||
|
||||
|
||||
DOCUMENT_CREATION_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['type'],
|
||||
'additionalProperties': True,
|
||||
'properties': {
|
||||
'type': {'type': 'string', 'description': _('Document type')},
|
||||
'file_field_name': {
|
||||
'type': 'string',
|
||||
'description': _('Document file\'s field name'),
|
||||
},
|
||||
'file': FILE_OBJECT_PROPERTIES,
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename (takes precedence over filename in "file" object)'),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
DOCUMENT_FILE_UPLOAD_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['file', 'file_field_name'],
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename (takes precedence over filename in "file" object)'),
|
||||
},
|
||||
'file': FILE_OBJECT_PROPERTIES,
|
||||
'file_field_name': {
|
||||
'type': 'string',
|
||||
'description': _('Document file\'s field name'),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class AdullactPastell(BaseResource, HTTPResource):
|
||||
api_base_url = models.URLField(
|
||||
max_length=128,
|
||||
verbose_name=_('API base URL'),
|
||||
help_text=_('Example: https://pastell.example.com/api/v2/'),
|
||||
)
|
||||
token = models.CharField(max_length=128, blank=True, verbose_name=_('API token'))
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Adullact Pastell')
|
||||
|
||||
def clean(self, *args, **kwargs):
|
||||
if not self.token and not self.basic_auth_username:
|
||||
raise ValidationError(_('API token or authentication username and password should be defined.'))
|
||||
return super().clean(*args, **kwargs)
|
||||
|
||||
def call(self, path, method='get', params=None, **kwargs):
|
||||
url = urlparse.urljoin(self.api_base_url, path)
|
||||
if self.token:
|
||||
kwargs.update({'headers': {'Authorization': 'Bearer: %s' % self.token}, 'auth': None})
|
||||
try:
|
||||
response = self.requests.request(url=url, method=method, params=params, **kwargs)
|
||||
response.raise_for_status()
|
||||
except (requests.Timeout, requests.RequestException) as e:
|
||||
raise APIError(str(e))
|
||||
return response
|
||||
|
||||
def check_status(self):
|
||||
try:
|
||||
response = self.call('version')
|
||||
except APIError as e:
|
||||
raise Exception('Pastell server is down: %s' % e)
|
||||
return {'data': response.json()}
|
||||
|
||||
def upload_file(self, entity_id, document_id, file_field_name, data, **kwargs):
|
||||
filename = kwargs.get('filename') or data['filename']
|
||||
file_data = {
|
||||
'file_content': (
|
||||
filename,
|
||||
base64.b64decode(data['content']),
|
||||
data.get('content_type'),
|
||||
)
|
||||
}
|
||||
|
||||
return self.call(
|
||||
'entite/%s/document/%s/file/%s' % (entity_id, document_id, file_field_name),
|
||||
'post',
|
||||
files=file_data,
|
||||
data={'file_name': filename},
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
description=_('List entities'),
|
||||
datasource=True,
|
||||
)
|
||||
def entities(self, request):
|
||||
data = []
|
||||
response = self.call('entite')
|
||||
for item in response.json():
|
||||
item['id'] = item['id_e']
|
||||
item['text'] = item['denomination']
|
||||
data.append(item)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
description=_('List entity documents'),
|
||||
parameters={'entity_id': {'description': _('Entity ID'), 'example_value': '42'}},
|
||||
datasource=True,
|
||||
)
|
||||
def documents(self, request, entity_id):
|
||||
if request.GET.get('id'):
|
||||
response = self.call('entite/%s/document/%s' % (entity_id, request.GET['id']))
|
||||
return {'data': response.json()}
|
||||
|
||||
data = []
|
||||
response = self.call('entite/%s/document' % entity_id)
|
||||
for item in response.json():
|
||||
item['id'] = item['id_d']
|
||||
item['text'] = item['titre']
|
||||
data.append(item)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Create a document for an entity'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_CREATION_SCHEMA}},
|
||||
},
|
||||
name='create-document',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
},
|
||||
)
|
||||
def create_document(self, request, entity_id, post_data):
|
||||
file_data = post_data.pop('file', None)
|
||||
file_field_name = post_data.pop('file_field_name', None)
|
||||
|
||||
# create document
|
||||
response = self.call('entite/%s/document' % entity_id, 'post', params=post_data)
|
||||
document_id = response.json()['id_d']
|
||||
|
||||
# update it with other attributes
|
||||
response = self.call('entite/%s/document/%s' % (entity_id, document_id), 'patch', params=post_data)
|
||||
|
||||
# upload file if it's filled
|
||||
if file_field_name and file_data:
|
||||
self.upload_file(entity_id, document_id, file_field_name, file_data, **post_data)
|
||||
|
||||
return {'data': response.json()}
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Upload a file to a document'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_FILE_UPLOAD_SCHEMA}},
|
||||
},
|
||||
name='upload-document-file',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
},
|
||||
)
|
||||
def upload_document_file(self, request, entity_id, document_id, post_data):
|
||||
file_field_name = post_data.pop('file_field_name')
|
||||
file_data = post_data.pop('file')
|
||||
response = self.upload_file(entity_id, document_id, file_field_name, file_data, **post_data)
|
||||
return {'data': response.json()}
|
||||
|
||||
@endpoint(
|
||||
description=_('Get document\'s file'),
|
||||
name='get-document-file',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
'field_name': {
|
||||
'description': _('Document file\'s field name'),
|
||||
'example_value': 'document',
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_document_file(self, request, entity_id, document_id, field_name):
|
||||
document = self.call('entite/%s/document/%s/file/%s' % (entity_id, document_id, field_name))
|
||||
response = HttpResponse(document.content, content_type=document.headers['Content-Type'])
|
||||
response['Content-Disposition'] = document.headers['Content-disposition']
|
||||
return response
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Run action on document'),
|
||||
'request_body': {
|
||||
'schema': {
|
||||
'application/json': {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['action_name'],
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'action_name': {'type': 'string', 'description': _('Action name')},
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
name='run-document-action',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
},
|
||||
)
|
||||
def run_document_action(self, request, entity_id, document_id, post_data):
|
||||
response = self.call(
|
||||
'entite/%s/document/%s/action/%s' % (entity_id, document_id, post_data['action_name']), 'post'
|
||||
)
|
||||
return {'data': response.json()}
|
|
@ -44,7 +44,6 @@ class AirQuality(BaseResource):
|
|||
@endpoint(
|
||||
pattern=r'^(?P<country>\w+)/(?P<city>\w+)/$',
|
||||
example_pattern='{country}/{city}/',
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'country': {'description': _('Country Code'), 'example_value': 'fr'},
|
||||
'city': {'description': _('City Name'), 'example_value': 'lyon'},
|
||||
|
|
|
@ -185,6 +185,7 @@ class APIEntreprise(BaseResource):
|
|||
METHOD_PARAM = {'description': _('method used for user identity matching'), 'example_value': 'simple'}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<association_id>\w+)/$',
|
||||
example_pattern='{association_id}/',
|
||||
description=_('Get association\'s documents'),
|
||||
|
@ -288,6 +289,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': document}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s data from Infogreffe'),
|
||||
|
@ -303,6 +305,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': raw_data['data']}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<association_id>\w+)/$',
|
||||
example_pattern='{association_id}/',
|
||||
description=_('Get association\'s related informations'),
|
||||
|
@ -321,6 +324,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s related informations'),
|
||||
|
@ -381,6 +385,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': {'entreprise': data, 'etablissement_siege': siege_data}}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
methods=['get'],
|
||||
pattern=r'(?P<siret>\w+)/$',
|
||||
example_pattern='{siret}/',
|
||||
|
@ -415,6 +420,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
methods=['get'],
|
||||
pattern=r'(?P<siret>\w+)/$',
|
||||
example_pattern='{siret}/',
|
||||
|
@ -430,6 +436,7 @@ class APIEntreprise(BaseResource):
|
|||
return self.get('v3/dgfip/etablissements/%s/chiffres_affaires' % siret, raw=True, **kwargs)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
description=_(
|
||||
'Match firm\'s society representative against local FranceConnect identity information'
|
||||
|
|
|
@ -1,56 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-04-14 17:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Resource',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'api_url',
|
||||
models.URLField(
|
||||
default='https://gw.dgfip.finances.gouv.fr/impotparticulier/1.0',
|
||||
max_length=256,
|
||||
verbose_name='DGFIP API base URL',
|
||||
),
|
||||
),
|
||||
('oauth_username', models.CharField(max_length=128, verbose_name='DGFIP API Username')),
|
||||
('oauth_password', models.CharField(max_length=128, verbose_name='DGFIP API Password')),
|
||||
(
|
||||
'oauth_scopes',
|
||||
models.CharField(max_length=128, verbose_name='DGFIP API Scopes', blank=True),
|
||||
),
|
||||
(
|
||||
'id_teleservice',
|
||||
models.TextField(max_length=128, verbose_name='DGFIP API ID_Teleservice', blank=True),
|
||||
),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_api_impot_particulier_resource_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'API Impot Particulier',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -1,22 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-05-25 09:49
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('api_impot_particulier', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='resource',
|
||||
name='id_teleservice',
|
||||
field=models.TextField(max_length=128, verbose_name='DGFIP API ID_Teleservice'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='resource',
|
||||
name='oauth_scopes',
|
||||
field=models.CharField(max_length=128, verbose_name='DGFIP API Scopes'),
|
||||
),
|
||||
]
|
|
@ -1,306 +0,0 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
import uuid
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import requests
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
from passerelle.utils.timeout import Timeout
|
||||
|
||||
|
||||
class ServiceIsDown(APIError):
|
||||
def __init__(self):
|
||||
super().__init__(_('API Impot Particulier service is unavailable'))
|
||||
|
||||
def __str__(self):
|
||||
if self.__context__:
|
||||
return f'{super().__str__()}: {self.__context__}'
|
||||
return super().__str__()
|
||||
|
||||
|
||||
class Resource(BaseResource):
|
||||
api_url = models.URLField(
|
||||
_('DGFIP API base URL'),
|
||||
max_length=256,
|
||||
default='https://gw.dgfip.finances.gouv.fr/impotparticulier/1.0',
|
||||
)
|
||||
oauth_username = models.CharField(_('DGFIP API Username'), max_length=128)
|
||||
oauth_password = models.CharField(_('DGFIP API Password'), max_length=128)
|
||||
oauth_scopes = models.CharField(_('DGFIP API Scopes'), max_length=128)
|
||||
id_teleservice = models.TextField(_('DGFIP API ID_Teleservice'), max_length=128)
|
||||
|
||||
log_requests_errors = False
|
||||
requests_timeout = 30
|
||||
requests_max_retries = {
|
||||
'total': 3,
|
||||
'backoff_factor': 0.5,
|
||||
'allowed_methods': ['GET', 'POST'],
|
||||
# retry after: 0.5, 1.5 and 3.5 seconds
|
||||
'status_forcelist': [413, 429, 503, 504],
|
||||
}
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('API Impot Particulier')
|
||||
|
||||
@classmethod
|
||||
def parse_numero_fiscal(cls, value):
|
||||
value = value.strip().replace(' ', '')
|
||||
if not (value and value.isascii() and value.isdigit()):
|
||||
raise APIError(_('invalid numero_fiscal'))
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def parse_annee_de_revenu(cls, value):
|
||||
try:
|
||||
value = int(value)
|
||||
except (TypeError, ValueError):
|
||||
raise APIError(_('invalid annee_de_revenu'))
|
||||
today = datetime.date.today()
|
||||
if not (0 < today.year - value < 10):
|
||||
raise APIError(_('invalid annee_de_revenu'))
|
||||
return value
|
||||
|
||||
@endpoint(
|
||||
name='spi-situations-ir-assiettes-annrev',
|
||||
description=_('Provides revenue tax situation for a specific year.'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
'description': _('Tax number of the person'),
|
||||
},
|
||||
'annee_de_revenu': {
|
||||
'description': _('Income year'),
|
||||
},
|
||||
},
|
||||
)
|
||||
def spi_situations_ir_assiettes_annrev(self, request, numero_fiscal, annee_de_revenu):
|
||||
numero_fiscal = self.parse_numero_fiscal(numero_fiscal)
|
||||
annee_de_revenu = self.parse_annee_de_revenu(annee_de_revenu)
|
||||
return {
|
||||
'data': self.get_spi_situations_ir_assiettes_annrev(
|
||||
numero_fiscal=numero_fiscal, annee_de_revenu=annee_de_revenu, timeout=Timeout(20)
|
||||
)
|
||||
}
|
||||
|
||||
def get_spi_situations_ir_assiettes_annrev(self, numero_fiscal, annee_de_revenu, timeout=None):
|
||||
return self.call(
|
||||
name='spi-situations-ir-assiettes-deuxans',
|
||||
endpoint_template='spi/{spi}/situations/ir/assiettes/annrev/{annrev}',
|
||||
timeout=timeout,
|
||||
spi=numero_fiscal,
|
||||
annrev=annee_de_revenu,
|
||||
accept='application/prs.dgfip.part.situations.ir.assiettes.v1+json',
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
name='spi-situations-th-assiettes-principale-annrev',
|
||||
description=_('Provides housing tax situation for a specific year.'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
'description': _('Tax number of the person'),
|
||||
},
|
||||
'annee_de_revenu': {
|
||||
'description': _('Income year'),
|
||||
},
|
||||
},
|
||||
)
|
||||
def spi_situations_th_assiettes_principale_annrev(self, request, numero_fiscal, annee_de_revenu):
|
||||
numero_fiscal = self.parse_numero_fiscal(numero_fiscal)
|
||||
annee_de_revenu = self.parse_annee_de_revenu(annee_de_revenu)
|
||||
return {
|
||||
'data': self.get_spi_situations_th_assiettes_principale_annrev(
|
||||
numero_fiscal=numero_fiscal, annee_de_revenu=annee_de_revenu, timeout=Timeout(20)
|
||||
)
|
||||
}
|
||||
|
||||
def get_spi_situations_th_assiettes_principale_annrev(self, numero_fiscal, annee_de_revenu, timeout=None):
|
||||
return self.call(
|
||||
name='spi-situations-th-assiettes-principale-deuxans',
|
||||
endpoint_template='spi/{spi}/situations/th/assiettes/principale/annrev/{annrev}',
|
||||
timeout=timeout,
|
||||
spi=numero_fiscal,
|
||||
annrev=annee_de_revenu,
|
||||
accept='application/prs.dgfip.part.situations.th.assiettes.v1+json',
|
||||
)
|
||||
|
||||
def call(self, name, endpoint_template, timeout=None, **kwargs):
|
||||
correlation_id = str(uuid.uuid4().hex)
|
||||
kwargs_formatted = ', '.join(f'{key}={value}' for key, value in kwargs.items())
|
||||
try:
|
||||
data = self.get_tax_data(
|
||||
session=self.requests,
|
||||
base_url=self.api_url,
|
||||
access_token=self._get_access_token(timeout=timeout),
|
||||
correlation_id=correlation_id,
|
||||
endpoint_template=endpoint_template,
|
||||
id_teleservice=self.id_teleservice,
|
||||
timeout=timeout,
|
||||
**kwargs,
|
||||
)
|
||||
except ServiceIsDown as e:
|
||||
self.logger.warning(
|
||||
'%s(%s) failed: %s',
|
||||
name,
|
||||
kwargs_formatted,
|
||||
e,
|
||||
extra={
|
||||
'correlation_id': correlation_id,
|
||||
'id_teleservice': self.id_teleservice,
|
||||
'kwargs': kwargs,
|
||||
},
|
||||
)
|
||||
raise
|
||||
else:
|
||||
self.logger.warning(
|
||||
'%s(%s) success',
|
||||
name,
|
||||
kwargs_formatted,
|
||||
extra={
|
||||
'data': data,
|
||||
'correlation_id': correlation_id,
|
||||
'id_teleservice': self.id_teleservice,
|
||||
'kwargs': kwargs,
|
||||
},
|
||||
)
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def get_tax_data(
|
||||
cls,
|
||||
session,
|
||||
base_url,
|
||||
access_token,
|
||||
correlation_id,
|
||||
endpoint_template,
|
||||
accept,
|
||||
id_teleservice=None,
|
||||
headers=None,
|
||||
timeout=None,
|
||||
**kwargs,
|
||||
):
|
||||
headers = {
|
||||
**(headers or {}),
|
||||
'Authorization': f'Bearer {access_token}',
|
||||
'X-Correlation-ID': correlation_id,
|
||||
'Accept': accept,
|
||||
}
|
||||
if id_teleservice:
|
||||
headers['ID_Teleservice'] = id_teleservice
|
||||
|
||||
endpoint = endpoint_template.format(**kwargs)
|
||||
if not base_url.endswith('/'):
|
||||
base_url += '/'
|
||||
url = urljoin(base_url, endpoint)
|
||||
|
||||
if timeout is not None:
|
||||
timeout = float(timeout)
|
||||
|
||||
# api-impot-particulier error reporting is byzantine, some errors are
|
||||
# accompanied by a 4xx code, some others with a 20x code, some have a
|
||||
# JSON content, other are only identified by a codeapp header on
|
||||
# the response
|
||||
try:
|
||||
response = session.get(url, headers=headers, timeout=timeout)
|
||||
response.raise_for_status()
|
||||
except requests.HTTPError:
|
||||
try:
|
||||
content = response.json()['erreur']
|
||||
except (ValueError, KeyError):
|
||||
try:
|
||||
raise APIError(
|
||||
'api-impot-particulier error', data={'codeapp': response.headers['codeapp']}
|
||||
)
|
||||
except KeyError:
|
||||
pass
|
||||
raise ServiceIsDown
|
||||
raise APIError('api-impot-particulier-error', data=content)
|
||||
except requests.RequestException:
|
||||
raise ServiceIsDown
|
||||
|
||||
if response.status_code != 200:
|
||||
try:
|
||||
content = response.json()['erreur']
|
||||
except (ValueError, KeyError):
|
||||
try:
|
||||
raise APIError(
|
||||
'api-impot-particulier-error', data={'codeapp': response.headers['codeapp']}
|
||||
)
|
||||
except KeyError:
|
||||
raise ServiceIsDown
|
||||
raise APIError('api-impot-particulier error', data=content)
|
||||
|
||||
try:
|
||||
response_data = response.json()
|
||||
except ValueError:
|
||||
raise ServiceIsDown
|
||||
return response_data
|
||||
|
||||
def _get_access_token(self, timeout=None):
|
||||
key = (
|
||||
'dgfip-at-'
|
||||
+ hashlib.sha256(
|
||||
f'{self.oauth_username}-{self.oauth_password}-{self.api_url}'.encode()
|
||||
).hexdigest()
|
||||
)
|
||||
|
||||
access_token = cache.get(key)
|
||||
if not access_token:
|
||||
access_token = self.get_access_token(
|
||||
session=self.requests,
|
||||
base_url=self.api_url,
|
||||
username=self.oauth_username,
|
||||
password=self.oauth_password,
|
||||
scope=self.oauth_scopes,
|
||||
timeout=timeout,
|
||||
)
|
||||
cache.set(key, access_token, 300)
|
||||
return access_token
|
||||
|
||||
@classmethod
|
||||
def get_access_token(cls, session, base_url, username, password, scope, timeout=None):
|
||||
data = {
|
||||
'grant_type': 'client_credentials',
|
||||
}
|
||||
if scope:
|
||||
data['scope'] = scope
|
||||
|
||||
url = urljoin(base_url, '/token')
|
||||
|
||||
if timeout is not None:
|
||||
timeout = float(timeout)
|
||||
|
||||
try:
|
||||
response = session.post(url, data=data, auth=(username, password), timeout=timeout)
|
||||
response.raise_for_status()
|
||||
except requests.RequestException:
|
||||
raise ServiceIsDown
|
||||
try:
|
||||
response_data = response.json()
|
||||
access_token = response_data['access_token']
|
||||
response_data = response.json()
|
||||
except (ValueError, KeyError, TypeError):
|
||||
raise ServiceIsDown
|
||||
return access_token
|
|
@ -17,9 +17,8 @@ KNOWN_ERRORS = {
|
|||
'Pas de droit sur la période demandée pour la prestation sélectionnée et le bénéficiaire choisi',
|
||||
'Pas de droit sur la période demandée pour la prestation sélectionnée.',
|
||||
"Votre quotient familial (Qf) sur cette période est non disponible. Pour plus d'information, contactez-nous.",
|
||||
# API particulier error messages not from the source above
|
||||
# API particulier error message not from the source above
|
||||
'Les paramètres fournis sont incorrects ou ne correspondent pas à un avis',
|
||||
"L'identifiant indiqué n'existe pas, n'est pas connu ou ne comporte aucune information pour cet appel.",
|
||||
},
|
||||
400: {
|
||||
'Absence de code confidentiel. Le document ne peut être édité.',
|
||||
|
@ -31,8 +30,6 @@ KNOWN_ERRORS = {
|
|||
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée',
|
||||
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée (après date du jour)',
|
||||
'L’opérateurs téléphonique» ne propose pas de raccordement SMS avec un prestataire externe (raccordement avec un numéro court). ',
|
||||
# API particulier error messages not from the source above
|
||||
"La référence de l'avis n'est pas correctement formatée",
|
||||
},
|
||||
500: {
|
||||
'Les informations souhaitées sont momentanément indisponibles. Merci de renouveler votre demande ultérieurement.',
|
||||
|
@ -42,7 +39,7 @@ KNOWN_ERRORS = {
|
|||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. Des paramètres manquent.",
|
||||
(
|
||||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. "
|
||||
'La taille du message ne doit pas être supérieure à 160 caractères.'
|
||||
"La taille du message ne doit pas être supérieure à 160 caractères."
|
||||
),
|
||||
(
|
||||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. "
|
||||
|
@ -53,7 +50,7 @@ KNOWN_ERRORS = {
|
|||
"Votre demande n'a pu aboutir en raison d'une erreur technique lié à l'appel au service IMC.",
|
||||
(
|
||||
"Votre demande n’a pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. "
|
||||
'Merci de renouveler votre demande ultérieurement.'
|
||||
"Merci de renouveler votre demande ultérieurement."
|
||||
),
|
||||
},
|
||||
}
|
||||
|
|
|
@ -1,17 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-12-13 10:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('api_particulier', '0006_api_key_length_1024'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='apiparticulier',
|
||||
name='api_key',
|
||||
field=models.CharField(blank=True, default='', max_length=2048, verbose_name='API key'),
|
||||
),
|
||||
]
|
|
@ -63,7 +63,7 @@ class APIParticulier(BaseResource):
|
|||
choices=[(key, platform['label']) for key, platform in PLATFORMS.items()],
|
||||
)
|
||||
|
||||
api_key = models.CharField(max_length=2048, default='', blank=True, verbose_name=_('API key'))
|
||||
api_key = models.CharField(max_length=1024, default='', blank=True, verbose_name=_('API key'))
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
|
@ -170,6 +170,7 @@ class APIParticulier(BaseResource):
|
|||
self.save()
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
description=_('Get scopes available'),
|
||||
display_order=1,
|
||||
)
|
||||
|
@ -183,6 +184,7 @@ class APIParticulier(BaseResource):
|
|||
}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
show=False,
|
||||
description=_('Get citizen\'s fiscal informations'),
|
||||
parameters={
|
||||
|
@ -206,6 +208,7 @@ class APIParticulier(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='avis-imposition',
|
||||
perm='can_access',
|
||||
description=_('Get citizen\'s fiscal informations'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
|
@ -300,6 +303,7 @@ class APIParticulier(BaseResource):
|
|||
return data
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
show=False,
|
||||
description=_('Get family allowances recipient informations'),
|
||||
parameters={
|
||||
|
@ -323,6 +327,7 @@ class APIParticulier(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='situation-familiale',
|
||||
perm='can_access',
|
||||
description=_('Get family allowances recipient informations'),
|
||||
parameters={
|
||||
'code_postal': {
|
||||
|
@ -358,11 +363,6 @@ class APIParticulier(BaseResource):
|
|||
)
|
||||
data['data']['numero_allocataire'] = numero_allocataire
|
||||
data['data']['code_postal'] = code_postal
|
||||
for kind in 'allocataires', 'enfants':
|
||||
for person in data['data'].get(kind) or []:
|
||||
if len(person.get('dateDeNaissance') or '') == 8:
|
||||
birthdate = person['dateDeNaissance']
|
||||
person['dateDeNaissance_iso'] = birthdate[4:] + '-' + birthdate[2:4] + '-' + birthdate[:2]
|
||||
return data
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import string
|
||||
from urllib import parse as urlparse
|
||||
|
||||
|
@ -33,42 +32,6 @@ from passerelle.utils.conversion import num2deg
|
|||
from passerelle.utils.jsonresponse import APIError
|
||||
from passerelle.utils.templates import render_to_string, validate_template
|
||||
|
||||
EDIT_ITEM_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'Item schema',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'geometry': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'x': {'type': 'string'},
|
||||
'y': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
'attributes': {'type': 'object'},
|
||||
},
|
||||
'required': ['attributes'],
|
||||
}
|
||||
|
||||
EDIT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'Edit payload',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'adds': {
|
||||
'type': 'array',
|
||||
'description': 'Adds object',
|
||||
'items': EDIT_ITEM_SCHEMA,
|
||||
},
|
||||
'updates': {'type': 'array', 'description': 'Updates object', 'items': EDIT_ITEM_SCHEMA},
|
||||
'deletes': {'type': 'array', 'description': 'Deletes object', 'items': {'type': 'string'}},
|
||||
},
|
||||
'minProperties': 1,
|
||||
'unflatten': True,
|
||||
}
|
||||
|
||||
|
||||
class ArcGISError(APIError):
|
||||
pass
|
||||
|
@ -214,6 +177,7 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='mapservice-query',
|
||||
description=_('Map Service Query'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
|
@ -283,6 +247,7 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='featureservice-query',
|
||||
description=_('Feature Service Query'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
|
@ -353,49 +318,9 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
text_fieldname=text_fieldname,
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
name='featureservice-applyedits',
|
||||
description=_('Feature Service Apply Edits'),
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
'example_value': 'Specialty',
|
||||
},
|
||||
'service': {
|
||||
'description': _('Service name'),
|
||||
'example_value': 'ESRI_StateCityHighway_USA',
|
||||
},
|
||||
'layer': {
|
||||
'description': _('Layer or table name'),
|
||||
'example_value': '1',
|
||||
},
|
||||
},
|
||||
post={'request_body': {'schema': {'application/json': EDIT_SCHEMA}}},
|
||||
)
|
||||
def featureservice_applyedits(
|
||||
self,
|
||||
request,
|
||||
post_data,
|
||||
service,
|
||||
layer='0',
|
||||
folder='',
|
||||
):
|
||||
# implement "apply edits" feature service
|
||||
# https://developers.arcgis.com/rest/services-reference/enterprise/apply-edits-feature-service-layer-.htm
|
||||
uri = 'services/'
|
||||
if folder:
|
||||
uri += folder + '/'
|
||||
uri = uri + service + '/FeatureServer/' + layer + '/applyEdits'
|
||||
params = {'f': 'pjson'}
|
||||
for key, value in post_data.items():
|
||||
post_data[key] = json.dumps(value)
|
||||
params.update(post_data)
|
||||
return {'data': self.request(urlparse.urljoin(self.base_url, uri), data=params)}
|
||||
|
||||
@endpoint(
|
||||
name='tile',
|
||||
description=_('Tiles layer'),
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<layer>[\w/]+)/(?P<zoom>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+)\.png$',
|
||||
)
|
||||
def tile(self, request, layer, zoom, tile_x, tile_y):
|
||||
|
@ -424,6 +349,7 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
name='q',
|
||||
description=_('Query'),
|
||||
pattern=r'^(?P<query_slug>[\w:_-]+)/$',
|
||||
perm='can_access',
|
||||
show=False,
|
||||
)
|
||||
def q(self, request, query_slug, q=None, full=False, **kwargs):
|
||||
|
|
|
@ -75,21 +75,13 @@ class ArpegeECP(BaseResource):
|
|||
@endpoint(
|
||||
name='api',
|
||||
pattern=r'^users/(?P<nameid>\w+)/forms$',
|
||||
example_pattern='users/{nameid}/forms',
|
||||
description=_('Returns user forms'),
|
||||
parameters={
|
||||
'nameid': {'description': _('Publik ID'), 'example_value': 'nameid'},
|
||||
'status': {'description': _('Demands status'), 'example_value': 'pending'},
|
||||
},
|
||||
perm='can_access',
|
||||
description='Returns user forms',
|
||||
)
|
||||
def get_user_forms(self, request, nameid, status='pending'):
|
||||
def get_user_forms(self, request, nameid):
|
||||
access_token = self.get_access_token(nameid)
|
||||
url = urlparse.urljoin(self.webservice_base_url, 'DemandesUsager')
|
||||
params = {'scope': 'data_administratives'}
|
||||
if status == 'pending':
|
||||
params['EtatDemande'] = 'DEPOSEE, ENCRSINSTR' # value for filtering pending forms
|
||||
elif status == 'done':
|
||||
params['EtatDemande'] = 'TRAITEEPOS, TRAITEENEG, TRAITEE' # value for filtering done forms
|
||||
auth = HawkAuth(self.hawk_auth_id, self.hawk_auth_key, ext=access_token)
|
||||
try:
|
||||
response = self.requests.get(url, params=params, auth=auth)
|
||||
|
@ -102,7 +94,7 @@ class ArpegeECP(BaseResource):
|
|||
except ValueError:
|
||||
raise APIError('No JSON content returned: %r' % response.content[:1000])
|
||||
if not result.get('Data'):
|
||||
raise APIError('%s (%s)' % (result.get('LibErreur'), result.get('CodErreur')))
|
||||
raise APIError("%s (%s)" % (result.get('LibErreur'), result.get('CodErreur')))
|
||||
for demand in result['Data']['results']:
|
||||
try:
|
||||
data_administratives = demand['data_administratives']
|
||||
|
|
|
@ -147,8 +147,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
|
||||
_category_ordering = [_('Parameters'), _('Rules'), _('Demand'), 'Tech & Debug']
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('AS-TECH')
|
||||
|
||||
|
@ -161,7 +159,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
try:
|
||||
content = response.json()
|
||||
except ValueError:
|
||||
content = '%r' % response.content[:1024]
|
||||
content = response.content[:1024]
|
||||
raise APIError(
|
||||
'AS-TECH response: %s %s' % (response.status_code, response.reason),
|
||||
data={
|
||||
|
@ -222,51 +220,10 @@ class ASTech(BaseResource, HTTPResource):
|
|||
json_response = self.call_json(method, url, params=params, **kwargs)
|
||||
return json_response
|
||||
|
||||
def get_view_schema(self, view_code):
|
||||
cache_key = 'astech-%s-%s-schema' % (self.id, view_code)
|
||||
schema = cache.get(cache_key)
|
||||
if schema:
|
||||
return schema
|
||||
endpoint = 'apicli/data/%s/columns' % view_code
|
||||
columns = self.call(endpoint).get('columns', [])
|
||||
schema = {}
|
||||
for column in columns:
|
||||
column.pop('des')
|
||||
code = column.pop('code')
|
||||
if column['type'] == 'NUM':
|
||||
column['operator'] = '='
|
||||
else:
|
||||
column['operator'] = 'is_equal'
|
||||
schema[code] = column
|
||||
cache.set(cache_key, schema)
|
||||
return schema
|
||||
|
||||
def build_view_filters(self, view_code, filters):
|
||||
if not filters:
|
||||
return []
|
||||
schema = self.get_view_schema(view_code)
|
||||
filters_expression = []
|
||||
for expression in filters.split(';'):
|
||||
try:
|
||||
name, value = expression.split('=')
|
||||
except ValueError:
|
||||
continue
|
||||
if value and schema[name]['length'] and len(value) > int(schema[name]['length']):
|
||||
raise APIError(
|
||||
_('Value of %s exceeds authorized length (%s)') % (name, schema[name]['length'])
|
||||
)
|
||||
filters_expression.append(
|
||||
{
|
||||
'field': name,
|
||||
'type': schema[name]['type'],
|
||||
'filter': {'value': value, 'operator': schema[name]['operator']},
|
||||
}
|
||||
)
|
||||
return filters_expression
|
||||
|
||||
@endpoint(
|
||||
name='connections',
|
||||
description=_('See all possible connections codes (see configuration)'),
|
||||
perm='can_access',
|
||||
display_category='Tech & Debug',
|
||||
display_order=1,
|
||||
)
|
||||
|
@ -276,6 +233,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='authorization',
|
||||
description=_('See authorization tokens (testing only)'),
|
||||
perm='can_access',
|
||||
display_category='Tech & Debug',
|
||||
display_order=2,
|
||||
)
|
||||
|
@ -284,7 +242,8 @@ class ASTech(BaseResource, HTTPResource):
|
|||
|
||||
@endpoint(
|
||||
name='services',
|
||||
description=_('List authorized services for connected user'),
|
||||
description=_("List authorized services for connected user"),
|
||||
perm='can_access',
|
||||
display_category=_('Rules'),
|
||||
display_order=1,
|
||||
)
|
||||
|
@ -297,6 +256,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='company',
|
||||
description=_('Company code of the applicant'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'applicant': {
|
||||
'description': _(
|
||||
|
@ -318,6 +278,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='companies',
|
||||
description=_('List of authorized companies for an applicant'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'applicant': {
|
||||
'description': _(
|
||||
|
@ -344,8 +305,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
'designation': True,
|
||||
}
|
||||
companies = self.call('apicli/rule-call-by-alias/societes_demandeur/invoke', json=payload)
|
||||
if not isinstance(companies, dict):
|
||||
raise APIError('Invalid response: %s' % companies)
|
||||
companies = [{'id': str(key), 'text': value} for key, value in companies.items()]
|
||||
companies.sort(key=lambda item: item['id']) # "same as output" sort
|
||||
return {'data': companies}
|
||||
|
@ -353,6 +312,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='labels',
|
||||
description=_('List of predefined labels for a company'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'company': {
|
||||
'description': _('Company code (societeDemandeur). If absent, use "company" endpoint result')
|
||||
|
@ -367,15 +327,14 @@ class ASTech(BaseResource, HTTPResource):
|
|||
labels = self.call(
|
||||
'apicli/rule-call-by-alias/libelles_predefinis/invoke', json={'societeDemandeur': company}
|
||||
)
|
||||
if not isinstance(labels, dict):
|
||||
raise APIError('Invalid response: %s' % labels)
|
||||
labels = [{'id': str(key), 'text': value} for key, value in labels.items()]
|
||||
labels.sort(key=lambda item: item['id']) # "same as output" sort
|
||||
return {'data': labels}
|
||||
|
||||
@endpoint(
|
||||
name='parameter',
|
||||
description=_('Value of a parameter'),
|
||||
description=_("Value of a parameter"),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'name': {'description': _('Name of the parameter'), 'example_value': 'LIBELDEMDEF'},
|
||||
'company': {'description': _('Company code. If absent, use "company" endpoint result')},
|
||||
|
@ -395,6 +354,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='create-demand',
|
||||
description=_('Create a demand'),
|
||||
perm='can_access',
|
||||
methods=['post'],
|
||||
post={'request_body': {'schema': {'application/json': DEMAND_SCHEMA}}},
|
||||
display_category=_('Demand'),
|
||||
|
@ -432,6 +392,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='add-document',
|
||||
description=_('Add a document in a demand'),
|
||||
perm='can_access',
|
||||
methods=['post'],
|
||||
post={'request_body': {'schema': {'application/json': ADD_DOCUMENT_SCHEMA}}},
|
||||
display_category=_('Demand'),
|
||||
|
@ -453,6 +414,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='demand-position',
|
||||
description=_('Get demand position'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'demand_id': {
|
||||
'description': _('Demand id'),
|
||||
|
@ -474,6 +436,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='demand-all-positions',
|
||||
description=_('List all demand possible positions'),
|
||||
perm='can_access',
|
||||
display_category=_('Demand'),
|
||||
display_order=4,
|
||||
)
|
||||
|
@ -484,69 +447,3 @@ class ASTech(BaseResource, HTTPResource):
|
|||
position['id'] = position['position']
|
||||
position['text'] = position['positionLib']
|
||||
return {'data': positions}
|
||||
|
||||
@endpoint(
|
||||
name='list-views',
|
||||
display_order=1,
|
||||
description=_('List available views'),
|
||||
display_category=_('Referential'),
|
||||
)
|
||||
def list_views(self, request):
|
||||
results = self.call('apicli/data/views')
|
||||
astech_views = results.get('views', [])
|
||||
for view in astech_views:
|
||||
view['id'] = view['apivId']
|
||||
view['text'] = view['apivNom']
|
||||
return {'data': astech_views}
|
||||
|
||||
@endpoint(
|
||||
name='get-view-columns',
|
||||
display_order=2,
|
||||
description=_('Get view columns'),
|
||||
display_category=_('Referential'),
|
||||
parameters={
|
||||
'code': {
|
||||
'description': _('View code'),
|
||||
'example_value': 'ASTECH_BIENS',
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_view_columns(self, request, code):
|
||||
endpoint = 'apicli/data/%s/columns' % code
|
||||
results = self.call(endpoint)
|
||||
columns = results.get('columns', [])
|
||||
for column in columns:
|
||||
column['id'] = column['code']
|
||||
column['text'] = column['des']
|
||||
return {'data': columns}
|
||||
|
||||
@endpoint(
|
||||
name='get-view-data',
|
||||
display_order=3,
|
||||
description=_('Get view data'),
|
||||
display_category=_('Referential'),
|
||||
datasource=True,
|
||||
parameters={
|
||||
'code': {
|
||||
'description': _('View code'),
|
||||
'example_value': 'ASTECH_BIENS',
|
||||
},
|
||||
'id_column': {'description': _('Name of column contaning the id'), 'example_value': 'BIEN_ID'},
|
||||
'text_column': {
|
||||
'description': _('Name of column contaning the label'),
|
||||
'example_value': 'DESIGNATION',
|
||||
},
|
||||
'filters': {
|
||||
'description': _('Semicolon separated filter expressions'),
|
||||
'example_value': 'GENRE=SIT;SECTEUR=S1',
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_view_data(self, request, code, id_column, text_column, filters=None):
|
||||
endpoint = 'apicli/data/%s/results' % code
|
||||
filters = self.build_view_filters(code, filters)
|
||||
results = self.call(endpoint, json={'data': {'filters': filters}})
|
||||
for result in results:
|
||||
result['id'] = result[id_column]
|
||||
result['text'] = result[text_column]
|
||||
return {'data': results}
|
||||
|
|
|
@ -282,6 +282,7 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-entites-getref',
|
||||
parameters=GF_DOCUMENTS_ENTITIES_GETFREF_PARAMS,
|
||||
)
|
||||
|
@ -293,6 +294,7 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-entites-list',
|
||||
parameters=GF_DOCUMENTS_ENTITIES_LIST_PARAMS,
|
||||
)
|
||||
|
@ -321,6 +323,7 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-entites-read',
|
||||
parameters=GF_DOCUMENTS_ENTITIES_READ_PARAMS,
|
||||
)
|
||||
|
@ -332,6 +335,7 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-entites-search',
|
||||
parameters=GF_DOCUMENTS_ENTITIES_SEARCH_PARAMS,
|
||||
)
|
||||
|
@ -360,6 +364,7 @@ class AstreREST(BaseResource):
|
|||
@endpoint(
|
||||
name='gf-documents-gedmanager-document-create',
|
||||
description=_('Create document'),
|
||||
perm='can_access',
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
|
@ -382,6 +387,7 @@ class AstreREST(BaseResource):
|
|||
@endpoint(
|
||||
name='gf-documents-gedmanager-document-delete',
|
||||
description=_('Delete document'),
|
||||
perm='can_access',
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
|
@ -410,6 +416,7 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-gedmanager-document-read',
|
||||
parameters=GF_DOCUMENTS_DOCUMENT_READ_PARAMS,
|
||||
)
|
||||
|
@ -426,6 +433,7 @@ class AstreREST(BaseResource):
|
|||
@endpoint(
|
||||
name='gf-documents-gedmanager-document-update',
|
||||
description=_('Update document'),
|
||||
perm='can_access',
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
|
@ -445,7 +453,7 @@ class AstreREST(BaseResource):
|
|||
)
|
||||
}
|
||||
|
||||
@endpoint(methods=['get'], name='gf-documents-referentiel-domainepj')
|
||||
@endpoint(methods=['get'], perm='can_access', name='gf-documents-referentiel-domainepj')
|
||||
def gf_documents_referentiel_domainepj(self, request):
|
||||
return {
|
||||
'data': self._get_data_source(
|
||||
|
@ -453,7 +461,7 @@ class AstreREST(BaseResource):
|
|||
)
|
||||
}
|
||||
|
||||
@endpoint(methods=['get'], name='gf-documents-referentiel-typedocument')
|
||||
@endpoint(methods=['get'], perm='can_access', name='gf-documents-referentiel-typedocument')
|
||||
def gf_documents_referentiel_typedocument(self, request):
|
||||
return {
|
||||
'data': self._get_data_source(
|
||||
|
|
|
@ -28,164 +28,164 @@ from passerelle.utils.jsonresponse import APIError
|
|||
from passerelle.utils.validation import is_number
|
||||
|
||||
ASSOCIATION_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS assocation',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'Financier',
|
||||
'CodeFamille',
|
||||
'CatTiers',
|
||||
'NomEnregistrement',
|
||||
'StatutTiers',
|
||||
'Type',
|
||||
'AdresseTitre',
|
||||
'AdresseIsAdresseDeCommande',
|
||||
'AdresseIsAdresseDeFacturation',
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS assocation",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"Financier",
|
||||
"CodeFamille",
|
||||
"CatTiers",
|
||||
"NomEnregistrement",
|
||||
"StatutTiers",
|
||||
"Type",
|
||||
"AdresseTitre",
|
||||
"AdresseIsAdresseDeCommande",
|
||||
"AdresseIsAdresseDeFacturation",
|
||||
],
|
||||
'properties': {
|
||||
'Financier': {'description': 'financial association', 'type': 'string', 'enum': ['true', 'false']},
|
||||
'CodeFamille': {
|
||||
'description': 'association family code',
|
||||
'type': 'string',
|
||||
"properties": {
|
||||
"Financier": {"description": "financial association", "type": "string", "enum": ["true", "false"]},
|
||||
"CodeFamille": {
|
||||
"description": "association family code",
|
||||
"type": "string",
|
||||
},
|
||||
'CatTiers': {
|
||||
'description': 'association category',
|
||||
'type': 'string',
|
||||
"CatTiers": {
|
||||
"description": "association category",
|
||||
"type": "string",
|
||||
},
|
||||
'NomEnregistrement': {
|
||||
'description': 'association name',
|
||||
'type': 'string',
|
||||
"NomEnregistrement": {
|
||||
"description": "association name",
|
||||
"type": "string",
|
||||
},
|
||||
'StatutTiers': {
|
||||
'description': 'association status',
|
||||
'type': 'string',
|
||||
'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'BLOQUE', 'A COMPLETER'],
|
||||
"StatutTiers": {
|
||||
"description": "association status",
|
||||
"type": "string",
|
||||
"enum": ["PROPOSE", "VALIDE", "REFUSE", "BLOQUE", "A COMPLETER"],
|
||||
},
|
||||
'Type': {'description': 'association type', 'type': 'string', 'enum': ['D', 'F', '*']},
|
||||
'NumeroSiret': {
|
||||
'description': 'SIREN number',
|
||||
'type': 'string',
|
||||
"Type": {"description": "association type", "type": "string", "enum": ["D", "F", "*"]},
|
||||
"NumeroSiret": {
|
||||
"description": "SIREN number",
|
||||
"type": "string",
|
||||
},
|
||||
'NumeroSiretFin': {
|
||||
'description': 'NIC number',
|
||||
'type': 'string',
|
||||
"NumeroSiretFin": {
|
||||
"description": "NIC number",
|
||||
"type": "string",
|
||||
},
|
||||
'AdresseTitre': {
|
||||
'type': 'string',
|
||||
"AdresseTitre": {
|
||||
"type": "string",
|
||||
},
|
||||
'AdresseIsAdresseDeCommande': {'type': 'string', 'enum': ['true', 'false']},
|
||||
'AdresseIsAdresseDeFacturation': {'type': 'string', 'enum': ['true', 'false']},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
"AdresseIsAdresseDeCommande": {"type": "string", "enum": ["true", "false"]},
|
||||
"AdresseIsAdresseDeFacturation": {"type": "string", "enum": ["true", "false"]},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
},
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
},
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
CONTACT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS contact',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'CodeContact',
|
||||
'CodeTitreCivilite',
|
||||
'Nom',
|
||||
'AdresseDestinataire',
|
||||
'CodePostal',
|
||||
'Ville',
|
||||
'EncodeKeyStatut',
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS contact",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"CodeContact",
|
||||
"CodeTitreCivilite",
|
||||
"Nom",
|
||||
"AdresseDestinataire",
|
||||
"CodePostal",
|
||||
"Ville",
|
||||
"EncodeKeyStatut",
|
||||
],
|
||||
'properties': {
|
||||
'CodeContact': {
|
||||
'type': 'string',
|
||||
"properties": {
|
||||
"CodeContact": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodeTitreCivilite': {
|
||||
'type': 'string',
|
||||
"CodeTitreCivilite": {
|
||||
"type": "string",
|
||||
},
|
||||
'Nom': {
|
||||
'type': 'string',
|
||||
"Nom": {
|
||||
"type": "string",
|
||||
},
|
||||
'AdresseDestinataire': {
|
||||
'type': 'string',
|
||||
"AdresseDestinataire": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodePostal': {
|
||||
'type': 'string',
|
||||
"CodePostal": {
|
||||
"type": "string",
|
||||
},
|
||||
'Ville': {
|
||||
'type': 'string',
|
||||
"Ville": {
|
||||
"type": "string",
|
||||
},
|
||||
'EncodeKeyStatut': {
|
||||
'type': 'string',
|
||||
"EncodeKeyStatut": {
|
||||
"type": "string",
|
||||
},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
},
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
},
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
DOCUMENT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS assocation',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'Sujet',
|
||||
'Entite',
|
||||
'CodType',
|
||||
'Type',
|
||||
'hdnCodeTrt',
|
||||
'EncodeKeyEntite',
|
||||
'CodeDomaine',
|
||||
'CodDom',
|
||||
'document',
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS assocation",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"Sujet",
|
||||
"Entite",
|
||||
"CodType",
|
||||
"Type",
|
||||
"hdnCodeTrt",
|
||||
"EncodeKeyEntite",
|
||||
"CodeDomaine",
|
||||
"CodDom",
|
||||
"document",
|
||||
],
|
||||
'properties': {
|
||||
'Sujet': {
|
||||
'type': 'string',
|
||||
"properties": {
|
||||
"Sujet": {
|
||||
"type": "string",
|
||||
},
|
||||
'Entite': {
|
||||
'type': 'string',
|
||||
"Entite": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodType': {
|
||||
'type': 'string',
|
||||
"CodType": {
|
||||
"type": "string",
|
||||
},
|
||||
'Type': {
|
||||
'type': 'string',
|
||||
"Type": {
|
||||
"type": "string",
|
||||
},
|
||||
'hdnCodeTrt': {
|
||||
'type': 'string',
|
||||
"hdnCodeTrt": {
|
||||
"type": "string",
|
||||
},
|
||||
'EncodeKeyEntite': {
|
||||
'type': 'string',
|
||||
"EncodeKeyEntite": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodeDomaine': {
|
||||
'type': 'string',
|
||||
"CodeDomaine": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodDom': {
|
||||
'type': 'string',
|
||||
"CodDom": {
|
||||
"type": "string",
|
||||
},
|
||||
'document': {
|
||||
'type': 'object',
|
||||
'required': ['filename', 'content_type', 'content'],
|
||||
"document": {
|
||||
"type": "object",
|
||||
"required": ['filename', 'content_type', 'content'],
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
|
@ -198,236 +198,236 @@ DOCUMENT_SCHEMA = {
|
|||
},
|
||||
},
|
||||
},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
},
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
},
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
GRANT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS grant',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'Libelle',
|
||||
'LibelleCourt',
|
||||
'ModGestion',
|
||||
'TypeAide',
|
||||
'Sens',
|
||||
'CodeTiersDem',
|
||||
'CodeServiceGestionnaire',
|
||||
'CodeServiceUtilisateur',
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS grant",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"Libelle",
|
||||
"LibelleCourt",
|
||||
"ModGestion",
|
||||
"TypeAide",
|
||||
"Sens",
|
||||
"CodeTiersDem",
|
||||
"CodeServiceGestionnaire",
|
||||
"CodeServiceUtilisateur",
|
||||
],
|
||||
'properties': {
|
||||
'Libelle': {
|
||||
'type': 'string',
|
||||
"properties": {
|
||||
"Libelle": {
|
||||
"type": "string",
|
||||
},
|
||||
'LibelleCourt': {
|
||||
'type': 'string',
|
||||
"LibelleCourt": {
|
||||
"type": "string",
|
||||
},
|
||||
'ModGestion': {'type': 'string', 'enum': ['1', '2', '3', '4']},
|
||||
'TypeAide': {
|
||||
'type': 'string',
|
||||
"ModGestion": {"type": "string", "enum": ["1", "2", "3", "4"]},
|
||||
"TypeAide": {
|
||||
"type": "string",
|
||||
},
|
||||
'Sens': {
|
||||
'type': 'string',
|
||||
"Sens": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodeTiersDem': {
|
||||
'type': 'string',
|
||||
"CodeTiersDem": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodeServiceGestionnaire': {
|
||||
'type': 'string',
|
||||
"CodeServiceGestionnaire": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodeServiceUtilisateur': {
|
||||
'type': 'string',
|
||||
"CodeServiceUtilisateur": {
|
||||
"type": "string",
|
||||
},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
},
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
},
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
INDANA_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS INDANA indicator',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': ['CodeDossier', 'CodeInd_1', 'AnneeInd_1', 'ValInd_1'],
|
||||
'properties': {
|
||||
'CodeDossier': {
|
||||
'type': 'string',
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS INDANA indicator",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": ["CodeDossier", "CodeInd_1", "AnneeInd_1", "ValInd_1"],
|
||||
"properties": {
|
||||
"CodeDossier": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodeInd_1': {
|
||||
'type': 'string',
|
||||
"CodeInd_1": {
|
||||
"type": "string",
|
||||
},
|
||||
'AnneeInd_1': {
|
||||
'type': 'string',
|
||||
"AnneeInd_1": {
|
||||
"type": "string",
|
||||
},
|
||||
'ValInd_1': {
|
||||
'type': 'string',
|
||||
"ValInd_1": {
|
||||
"type": "string",
|
||||
},
|
||||
'IndAide': {
|
||||
'type': 'string',
|
||||
"IndAide": {
|
||||
"type": "string",
|
||||
},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
},
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
},
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
INDANA_KEY_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS INDANA indicator key',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': ['CodeDossier', 'CodeInd_1', 'AnneeInd_1'],
|
||||
'properties': {
|
||||
'CodeDossier': {
|
||||
'type': 'string',
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS INDANA indicator key",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": ["CodeDossier", "CodeInd_1", "AnneeInd_1"],
|
||||
"properties": {
|
||||
"CodeDossier": {
|
||||
"type": "string",
|
||||
},
|
||||
'CodeInd_1': {
|
||||
'type': 'string',
|
||||
"CodeInd_1": {
|
||||
"type": "string",
|
||||
},
|
||||
'AnneeInd_1': {
|
||||
'type': 'string',
|
||||
"AnneeInd_1": {
|
||||
"type": "string",
|
||||
},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
},
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
},
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
TIERS_RIB_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS TiersRib',
|
||||
'description': 'TiersRib',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'CodeTiers',
|
||||
'CodePaiement',
|
||||
'LibelleCourt',
|
||||
'NumeroIban',
|
||||
'CleIban',
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS TiersRib",
|
||||
"description": "TiersRib",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"CodeTiers",
|
||||
"CodePaiement",
|
||||
"LibelleCourt",
|
||||
"NumeroIban",
|
||||
"CleIban",
|
||||
'CodeBic',
|
||||
'CodeDomiciliation',
|
||||
'CodeStatut',
|
||||
'CodeDevise',
|
||||
'CodeIso2Pays',
|
||||
'LibelleCompteEtranger',
|
||||
"CodeDomiciliation",
|
||||
"CodeStatut",
|
||||
"CodeDevise",
|
||||
"CodeIso2Pays",
|
||||
"LibelleCompteEtranger",
|
||||
],
|
||||
'properties': {
|
||||
'CodeDevise': {'type': 'string'},
|
||||
'CodeDomiciliation': {'type': 'string'},
|
||||
'CodeIso2Pays': {'type': 'string'},
|
||||
'CodePaiement': {'type': 'string'},
|
||||
'CodeStatut': {
|
||||
'type': 'string',
|
||||
'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'A COMPLETER', 'BLOQUE', 'EN MODIFICATION'],
|
||||
"properties": {
|
||||
"CodeDevise": {"type": "string"},
|
||||
"CodeDomiciliation": {"type": "string"},
|
||||
"CodeIso2Pays": {"type": "string"},
|
||||
"CodePaiement": {"type": "string"},
|
||||
"CodeStatut": {
|
||||
"type": "string",
|
||||
"enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"],
|
||||
},
|
||||
'CodeTiers': {'type': 'string'},
|
||||
'IndicateurRibDefaut': {'type': 'string'},
|
||||
'LibelleCompteEtranger': {'type': 'string'},
|
||||
'LibelleCourt': {'type': 'string'},
|
||||
'NumeroIban': {'type': 'string'},
|
||||
'CleIban': {'type': 'string'},
|
||||
'CodeBic': {'type': 'string'},
|
||||
'IdRib': {'type': 'string'},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
"CodeTiers": {"type": "string"},
|
||||
"IndicateurRibDefaut": {"type": "string"},
|
||||
"LibelleCompteEtranger": {"type": "string"},
|
||||
"LibelleCourt": {"type": "string"},
|
||||
"NumeroIban": {"type": "string"},
|
||||
"CleIban": {"type": "string"},
|
||||
"CodeBic": {"type": "string"},
|
||||
"IdRib": {"type": "string"},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
},
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
},
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
TIERS_RIB_UPDATE_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS TiersRib',
|
||||
'description': 'TiersRib Update',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'CodePaiement',
|
||||
'LibelleCourt',
|
||||
'NumeroIban',
|
||||
'CleIban',
|
||||
'CodeBic',
|
||||
'CodeDomiciliation',
|
||||
'CodeStatut',
|
||||
'CodeDevise',
|
||||
'CodeIso2Pays',
|
||||
'LibelleCompteEtranger',
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS TiersRib",
|
||||
"description": "TiersRib Update",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"CodePaiement",
|
||||
"LibelleCourt",
|
||||
"NumeroIban",
|
||||
"CleIban",
|
||||
"CodeBic",
|
||||
"CodeDomiciliation",
|
||||
"CodeStatut",
|
||||
"CodeDevise",
|
||||
"CodeIso2Pays",
|
||||
"LibelleCompteEtranger",
|
||||
],
|
||||
'properties': {
|
||||
'CodeDevise': {'type': 'string'},
|
||||
'CodeDomiciliation': {'type': 'string'},
|
||||
'CodeIso2Pays': {'type': 'string'},
|
||||
'CodePaiement': {'type': 'string'},
|
||||
'CodeStatut': {
|
||||
'type': 'string',
|
||||
'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'A COMPLETER', 'BLOQUE', 'EN MODIFICATION'],
|
||||
"properties": {
|
||||
"CodeDevise": {"type": "string"},
|
||||
"CodeDomiciliation": {"type": "string"},
|
||||
"CodeIso2Pays": {"type": "string"},
|
||||
"CodePaiement": {"type": "string"},
|
||||
"CodeStatut": {
|
||||
"type": "string",
|
||||
"enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"],
|
||||
},
|
||||
'IndicateurRibDefaut': {'type': 'string'},
|
||||
'LibelleCompteEtranger': {'type': 'string'},
|
||||
'LibelleCourt': {'type': 'string'},
|
||||
'NumeroIban': {'type': 'string'},
|
||||
'CleIban': {'type': 'string'},
|
||||
'CodeBic': {'type': 'string'},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
"IndicateurRibDefaut": {"type": "string"},
|
||||
"LibelleCompteEtranger": {"type": "string"},
|
||||
"LibelleCourt": {"type": "string"},
|
||||
"NumeroIban": {"type": "string"},
|
||||
"CleIban": {"type": "string"},
|
||||
"CodeBic": {"type": "string"},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
},
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
},
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -492,6 +492,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Find associations by SIREN number'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'siren': {'description': _('SIREN Number'), 'example_value': '77567227216096'},
|
||||
'organism': {'description': _('Organisme'), 'example_value': 'NOMDEVILLE'},
|
||||
|
@ -516,6 +517,7 @@ class AstreGS(BaseResource):
|
|||
@endpoint(
|
||||
description=_('Check if association exists by its SIRET number'),
|
||||
name='check-association-by-siret',
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'siret': {'description': _('SIRET Number'), 'example_value': '7756722721609600014'},
|
||||
'organism': {'description': _('Organisme'), 'example_value': 'NOMDEVILLE'},
|
||||
|
@ -533,6 +535,7 @@ class AstreGS(BaseResource):
|
|||
@endpoint(
|
||||
name='get-association-link-means',
|
||||
description=_('Get association linking means'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'association_id': {'description': _('Association ID'), 'example_value': '42435'},
|
||||
'NameID': {'description': _('Publik ID'), 'example_value': 'xyz24d934'},
|
||||
|
@ -582,6 +585,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Create link between user and association'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -615,6 +619,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Remove link between user and association'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {'description': _('Publik NameID'), 'example_value': 'xyz24d934'},
|
||||
'association_id': {'description': _('Association ID'), 'example_value': '12345'},
|
||||
|
@ -630,6 +635,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('List user links'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -659,6 +665,7 @@ class AstreGS(BaseResource):
|
|||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
name='create-association',
|
||||
post={
|
||||
'description': _('Creates an association'),
|
||||
|
@ -673,6 +680,7 @@ class AstreGS(BaseResource):
|
|||
@endpoint(
|
||||
description=_('Get association informations'),
|
||||
name='get-association-by-id',
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'association_id': {'description': _('Association ID'), 'example_value': '42435'},
|
||||
'NameID': {'description': _('Publik ID'), 'example_value': 'xyz24d934'},
|
||||
|
@ -693,6 +701,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='get-contact',
|
||||
perm='can_access',
|
||||
description=_('Get contact details'),
|
||||
parameters={
|
||||
'contact_id': {
|
||||
|
@ -711,6 +720,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-contact',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create contact'),
|
||||
'request_body': {'schema': {'application/json': CONTACT_SCHEMA}},
|
||||
|
@ -727,6 +737,7 @@ class AstreGS(BaseResource):
|
|||
@endpoint(
|
||||
description=_('Delete contact'),
|
||||
name='delete-contact',
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'contact_id': {'description': _('Contact ID'), 'example_value': '4242'},
|
||||
'organism': {'description': _('Organisme'), 'example_value': 'NOMDEVILLE'},
|
||||
|
@ -741,6 +752,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-document',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create document'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_SCHEMA}},
|
||||
|
@ -757,6 +769,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-grant-demand',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create grant demand'),
|
||||
'request_body': {'schema': {'application/json': GRANT_SCHEMA}},
|
||||
|
@ -769,6 +782,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-indana-indicator',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create indana indicator'),
|
||||
'request_body': {'schema': {'application/json': INDANA_SCHEMA}},
|
||||
|
@ -781,6 +795,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='update-indana-indicator',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Update indana indicator'),
|
||||
'request_body': {'schema': {'application/json': INDANA_SCHEMA}},
|
||||
|
@ -793,6 +808,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='delete-indana-indicator',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Delete indana indicator'),
|
||||
'request_body': {'schema': {'application/json': INDANA_KEY_SCHEMA}},
|
||||
|
@ -805,6 +821,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-tiers-rib',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create RIB'),
|
||||
'request_body': {'schema': {'application/json': TIERS_RIB_SCHEMA}},
|
||||
|
@ -817,6 +834,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='get-tiers-rib',
|
||||
perm='can_access',
|
||||
description=_('Get RIB'),
|
||||
parameters={
|
||||
'CodeTiers': {'example_value': '42435'},
|
||||
|
@ -834,6 +852,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='update-tiers-rib',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Update RIB'),
|
||||
'request_body': {'schema': {'application/json': TIERS_RIB_UPDATE_SCHEMA}},
|
||||
|
@ -852,6 +871,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='delete-tiers-rib',
|
||||
perm='can_access',
|
||||
description=_('Delete RIB'),
|
||||
parameters={
|
||||
'CodeTiers': {'example_value': '42435'},
|
||||
|
@ -869,6 +889,7 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='find-tiers-by-rib',
|
||||
perm='can_access',
|
||||
description=_('Find person by RIB'),
|
||||
parameters={
|
||||
'banque': {'example_value': '30001'},
|
||||
|
@ -891,12 +912,13 @@ class AstreGS(BaseResource):
|
|||
for item in r.liste.EnregRechercheTiersReturn:
|
||||
tiers_data = serialize_object(item)
|
||||
tiers_data['id'] = tiers_data['N']
|
||||
tiers_data['text'] = '{Nom_Enregistrement} ({N})'.format(**tiers_data)
|
||||
tiers_data['text'] = '%{Nom_Enregistrement}s (%{N}s)'.format(**tiers_data)
|
||||
data.append(tiers_data)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
name='get-dossier',
|
||||
perm='can_access',
|
||||
description=_('Get Dossier'),
|
||||
parameters={
|
||||
'CodeDossier': {'example_value': '2021-0004933'},
|
||||
|
|
|
@ -23,7 +23,6 @@ from django.db import models
|
|||
from django.utils import dateformat, dateparse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from zeep import helpers
|
||||
from zeep.exceptions import Fault
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
|
@ -81,23 +80,24 @@ class ATALConnector(BaseResource):
|
|||
"""
|
||||
self._soap_client(wsdl='DemandeService')
|
||||
|
||||
@endpoint(methods=['get'], name='get-thematique')
|
||||
@endpoint(methods=['get'], perm='can_access', name='get-thematique')
|
||||
def get_thematique(self, request):
|
||||
return self._xml_ref('DemandeService', 'getThematiqueATAL', 'thematiques')
|
||||
|
||||
@endpoint(methods=['get'], name='get-type-activite')
|
||||
@endpoint(methods=['get'], perm='can_access', name='get-type-activite')
|
||||
def get_type_activite(self, request):
|
||||
return self._basic_ref('VilleAgileService', 'getTypeActivite')
|
||||
|
||||
@endpoint(methods=['get'], name='get-type-de-voie')
|
||||
@endpoint(methods=['get'], perm='can_access', name='get-type-de-voie')
|
||||
def get_type_de_voie(self, request):
|
||||
return self._basic_ref('VilleAgileService', 'getTypeDeVoie')
|
||||
|
||||
@endpoint(methods=['get'], name='get-types-equipement')
|
||||
@endpoint(methods=['get'], perm='can_access', name='get-types-equipement')
|
||||
def get_types_equipement(self, request):
|
||||
return self._xml_ref('VilleAgileService', 'getTypesEquipement', 'types')
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
name='insert-action-comment',
|
||||
post={
|
||||
'description': _('Insert action comment'),
|
||||
|
@ -114,6 +114,7 @@ class ATALConnector(BaseResource):
|
|||
return process_response(demande_number)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
name='insert-demande-complet-by-type',
|
||||
post={
|
||||
'description': _('Insert demande complet by type'),
|
||||
|
@ -170,6 +171,7 @@ class ATALConnector(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
example_pattern='{demande_number}/',
|
||||
pattern=r'^(?P<demande_number>\w+)/$',
|
||||
name='retrieve-details-demande',
|
||||
|
@ -183,6 +185,7 @@ class ATALConnector(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
example_pattern='{demande_number}/',
|
||||
pattern=r'^(?P<demande_number>\w+)/$',
|
||||
name='retrieve-etat-travaux',
|
||||
|
@ -194,6 +197,7 @@ class ATALConnector(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
example_pattern='{demande_number}/',
|
||||
pattern=r'^(?P<demande_number>\w+)/$',
|
||||
parameters={
|
||||
|
@ -261,6 +265,7 @@ class ATALConnector(BaseResource):
|
|||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Upload a file'),
|
||||
'request_body': {'schema': {'application/json': schemas.UPLOAD}},
|
||||
|
@ -281,15 +286,12 @@ class ATALConnector(BaseResource):
|
|||
'numeroDemande': post_data['numero_demande'],
|
||||
'nomFichier': filename,
|
||||
}
|
||||
try:
|
||||
self._soap_call(wsdl='ChargementPiecesJointesService', method='upload', **data)
|
||||
except Fault as e:
|
||||
raise APIError(str(e))
|
||||
|
||||
self._soap_call(wsdl='ChargementPiecesJointesService', method='upload', **data)
|
||||
return {}
|
||||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
example_pattern='{demande_number}/',
|
||||
pattern=r'^(?P<demande_number>\w+)/$',
|
||||
name='new-comments',
|
||||
|
|
|
@ -1,59 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-06-26 15:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = []
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AtalREST',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'basic_auth_username',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication username'
|
||||
),
|
||||
),
|
||||
(
|
||||
'basic_auth_password',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication password'
|
||||
),
|
||||
),
|
||||
(
|
||||
'client_certificate',
|
||||
models.FileField(
|
||||
blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
|
||||
),
|
||||
),
|
||||
(
|
||||
'trusted_certificate_authorities',
|
||||
models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'),
|
||||
),
|
||||
(
|
||||
'verify_cert',
|
||||
models.BooleanField(blank=True, default=True, verbose_name='TLS verify certificates'),
|
||||
),
|
||||
(
|
||||
'http_proxy',
|
||||
models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
|
||||
),
|
||||
('base_url', models.URLField(verbose_name='API URL')),
|
||||
('api_key', models.CharField(max_length=1024, verbose_name='API key')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Atal REST',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -1,539 +0,0 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import binascii
|
||||
import collections
|
||||
import io
|
||||
import json
|
||||
import urllib
|
||||
|
||||
import requests
|
||||
from django.db import models
|
||||
from django.utils import dateparse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource, HTTPResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
FILE_OBJECT = {
|
||||
'type': 'object',
|
||||
'description': 'File object',
|
||||
'required': ['content'],
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': 'Filename',
|
||||
},
|
||||
'content': {
|
||||
'type': 'string',
|
||||
'description': 'Content',
|
||||
},
|
||||
'content_type': {
|
||||
'type': 'string',
|
||||
'description': 'Content type',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
SINGLE_ATTACHMENT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'file': {
|
||||
'oneOf': [
|
||||
FILE_OBJECT,
|
||||
{'type': 'string', 'description': 'empty file, do not consider', 'pattern': r'^$'},
|
||||
{'type': 'null', 'description': 'empty file, do not consider'},
|
||||
]
|
||||
}
|
||||
},
|
||||
'required': ['file'],
|
||||
}
|
||||
|
||||
|
||||
ATTACHMENTS_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'files': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'oneOf': [
|
||||
FILE_OBJECT,
|
||||
{'type': 'string', 'description': 'empty file, do not consider', 'pattern': r'^$'},
|
||||
{'type': 'null', 'description': 'empty file, do not consider'},
|
||||
]
|
||||
},
|
||||
},
|
||||
'worksrequests_ids': {'type': 'array', 'items': {'type': 'string'}},
|
||||
},
|
||||
'required': ['files', 'worksrequests_ids'],
|
||||
'unflatten': True,
|
||||
}
|
||||
|
||||
|
||||
WORKSREQUESTS_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'properties': collections.OrderedDict(
|
||||
{
|
||||
'activity_nature_id': {'type': 'string'},
|
||||
'comments': {'type': 'string'},
|
||||
'contact': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'adress1': {'type': 'string'},
|
||||
'city': {'type': 'string'},
|
||||
'email': {'type': 'string'},
|
||||
'first_name': {'type': 'string'},
|
||||
'last_name': {'type': 'string'},
|
||||
'mobile': {'type': 'string'},
|
||||
'phone': {'type': 'string'},
|
||||
'zipcode': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
'description': {'type': 'string'},
|
||||
'desired_date': {'type': 'string', 'description': 'format YYYY-MM-DD'},
|
||||
'keywords': {'type': 'string'},
|
||||
'latitude': {
|
||||
'oneOf': [
|
||||
{'type': 'number'},
|
||||
{'type': 'string'},
|
||||
]
|
||||
},
|
||||
'localization': {'type': 'string'},
|
||||
'longitude': {
|
||||
'oneOf': [
|
||||
{'type': 'number'},
|
||||
{'type': 'string'},
|
||||
]
|
||||
},
|
||||
'object': {'type': 'string'},
|
||||
'operator': {'type': 'string'},
|
||||
'patrimony_id': {'type': 'string'},
|
||||
'priority_id': {'type': 'string'},
|
||||
'recipient_id': {'type': 'string'},
|
||||
'request_date': {
|
||||
'type': 'string',
|
||||
'description': 'format YYYY-MM-DD',
|
||||
},
|
||||
'requester_id': {'type': 'string'},
|
||||
'requesting_department_id': {'type': 'string'},
|
||||
'request_type': {'type': 'string'},
|
||||
'suggested_recipient_id': {'type': 'string'},
|
||||
'thematic_ids': {'type': 'array', 'items': {'type': 'string'}},
|
||||
}
|
||||
),
|
||||
'required': ['object', 'recipient_id', 'requester_id', 'requesting_department_id'],
|
||||
'unflatten': True,
|
||||
}
|
||||
|
||||
STATUS_MAP = {
|
||||
0: 'En attente',
|
||||
1: 'En analyse',
|
||||
2: 'Acceptée',
|
||||
3: 'Refusée',
|
||||
4: 'Annulée',
|
||||
5: 'Ajournée',
|
||||
6: 'Brouillon',
|
||||
7: 'Redirigée',
|
||||
8: 'Prise en compte',
|
||||
9: 'Clôturée',
|
||||
13: 'Archivée',
|
||||
14: 'À spécifier',
|
||||
15: 'À valider',
|
||||
}
|
||||
|
||||
|
||||
INTERVENTION_STATUS_MAP = {
|
||||
1: 'Pas commencé',
|
||||
2: 'En cours',
|
||||
4: 'Terminé',
|
||||
5: 'Fermé',
|
||||
}
|
||||
|
||||
|
||||
def to_ds(record):
|
||||
record['id'] = record['Id']
|
||||
record['text'] = record['Name']
|
||||
return record
|
||||
|
||||
|
||||
class AtalREST(BaseResource, HTTPResource):
|
||||
base_url = models.URLField(_('API URL'))
|
||||
api_key = models.CharField(max_length=1024, verbose_name=_('API key'))
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Atal REST')
|
||||
|
||||
def _call(
|
||||
self, path, method='get', params=None, data=None, json_data=None, files=None, return_response=False
|
||||
):
|
||||
url = urllib.parse.urljoin(self.base_url, path)
|
||||
kwargs = {}
|
||||
kwargs['headers'] = {'X-API-Key': self.api_key}
|
||||
if params:
|
||||
kwargs['params'] = params
|
||||
|
||||
if method == 'post':
|
||||
if not json_data:
|
||||
json_data = {}
|
||||
kwargs['json'] = json_data
|
||||
if files:
|
||||
kwargs['files'] = files
|
||||
if data:
|
||||
kwargs['data'] = data
|
||||
|
||||
try:
|
||||
resp = self.requests.request(url=url, method=method, **kwargs)
|
||||
except (requests.Timeout, requests.RequestException) as e:
|
||||
raise APIError(str(e))
|
||||
try:
|
||||
resp.raise_for_status()
|
||||
except requests.RequestException as main_exc:
|
||||
try:
|
||||
err_data = resp.json()
|
||||
except (json.JSONDecodeError, requests.exceptions.RequestException):
|
||||
err_data = {'response_text': resp.text}
|
||||
raise APIError(str(main_exc), data=err_data)
|
||||
|
||||
if return_response:
|
||||
return resp
|
||||
|
||||
try:
|
||||
return resp.json()
|
||||
except (json.JSONDecodeError, requests.exceptions.RequestException) as e:
|
||||
raise APIError(str(e))
|
||||
|
||||
def check_status(self):
|
||||
return self._call('api/Test', return_response=True)
|
||||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
name='thirdparties-requesting-departments',
|
||||
description=_('Get the third parties requesting departments referential'),
|
||||
parameters={
|
||||
'request_type': {
|
||||
'example_value': '1001',
|
||||
}
|
||||
},
|
||||
)
|
||||
def thirdparties_requesting_departments(self, request, request_type):
|
||||
return {
|
||||
'data': [
|
||||
to_ds(record)
|
||||
for record in self._call(
|
||||
'api/ThirdParties/RequestingDepartments', params={'RequestType': request_type}
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
description=_('Get the users referential'),
|
||||
)
|
||||
def users(self, request):
|
||||
return {'data': [to_ds(record) for record in self._call('api/Users')]}
|
||||
|
||||
@endpoint(
|
||||
description=_('Create a works request'),
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
'application/json': WORKSREQUESTS_SCHEMA,
|
||||
}
|
||||
},
|
||||
'input_example': {
|
||||
'activity_nature_id': '0',
|
||||
'comments': 'some comment',
|
||||
'contact/adress1': '1 rue des cinq diamants',
|
||||
'contact/city': 'paris',
|
||||
'contact/email': 'foo@bar.invalid',
|
||||
'contact/first_name': 'john',
|
||||
'contact/last_name': 'doe',
|
||||
'contact/mobile': '0606060606',
|
||||
'contact/phone': '0101010101',
|
||||
'contact/zipcode': '75013',
|
||||
'description': 'some description',
|
||||
'desired_date': '2023-06-28',
|
||||
'keywords': 'foo bar',
|
||||
'latitude': '0',
|
||||
'localization': 'somewhere',
|
||||
'longitude': '0',
|
||||
'object': 'some object',
|
||||
'operator': 'some operator',
|
||||
'patrimony_id': '0',
|
||||
'priority_id': '0',
|
||||
'recipient_id': '0',
|
||||
'request_date': '2023-06-27',
|
||||
'requester_id': '0',
|
||||
'requesting_department_id': '0',
|
||||
'request_type': '0',
|
||||
'suggested_recipient_id': {'type': 'string'},
|
||||
'thematic_ids/0': '1',
|
||||
'thematic_ids/1': '2',
|
||||
},
|
||||
},
|
||||
)
|
||||
def worksrequests(self, request, post_data):
|
||||
data = {}
|
||||
int_params = {
|
||||
'activity_nature_id': 'ActivityNatureId',
|
||||
'patrimony_id': 'PatrimonyId',
|
||||
'priority_id': 'PriorityId',
|
||||
'recipient_id': 'RecipientId',
|
||||
'requester_id': 'RequesterId',
|
||||
'requesting_department_id': 'RequestingDepartmentId',
|
||||
'request_type': 'RequestType',
|
||||
'suggested_recipient_id': 'SuggestedRecipientId',
|
||||
}
|
||||
for param, atal_param in int_params.items():
|
||||
if param in post_data:
|
||||
try:
|
||||
data[atal_param] = int(post_data[param])
|
||||
except ValueError:
|
||||
raise APIError('%s must be an integer' % param)
|
||||
|
||||
float_params = {
|
||||
'latitude': 'Latitude',
|
||||
'longitude': 'Longitude',
|
||||
}
|
||||
for param, atal_param in float_params.items():
|
||||
param_value = post_data.get(param, '')
|
||||
if param_value:
|
||||
if isinstance(param_value, str):
|
||||
param_value = param_value.replace(',', '.')
|
||||
try:
|
||||
data[atal_param] = float(param_value)
|
||||
except ValueError:
|
||||
raise APIError('%s must be a float' % param)
|
||||
|
||||
if 'thematic_ids' in post_data:
|
||||
data['ThematicIds'] = []
|
||||
for thematic_id in post_data['thematic_ids']:
|
||||
try:
|
||||
data['ThematicIds'].append(int(thematic_id))
|
||||
except ValueError:
|
||||
raise APIError('a thematic identifier must be an intenger')
|
||||
|
||||
datetime_params = {
|
||||
'desired_date': 'DesiredDate',
|
||||
'request_date': 'RequestDate',
|
||||
}
|
||||
for param, atal_param in datetime_params.items():
|
||||
if param in post_data:
|
||||
try:
|
||||
obj = dateparse.parse_date(post_data[param])
|
||||
except ValueError:
|
||||
obj = None
|
||||
if obj is None:
|
||||
raise APIError(
|
||||
'%s must be a valid YYYY-MM-DD date (received: "%s")' % (param, post_data[param])
|
||||
)
|
||||
data[atal_param] = obj.isoformat()
|
||||
|
||||
contact_params = {
|
||||
'adress1': 'Adress1',
|
||||
'city': 'City',
|
||||
'email': 'Email',
|
||||
'first_name': 'FirstName',
|
||||
'last_name': 'LastName',
|
||||
'mobile': 'Mobile',
|
||||
'phone': 'Phone',
|
||||
'zipcode': 'ZipCode',
|
||||
}
|
||||
if 'contact' in post_data:
|
||||
data['Contact'] = {}
|
||||
for param, atal_param in contact_params.items():
|
||||
if param in post_data['contact']:
|
||||
data['Contact'][atal_param] = post_data['contact'][param]
|
||||
|
||||
string_params = {
|
||||
'comments': 'Comments',
|
||||
'description': 'Description',
|
||||
'keywords': 'Keywords',
|
||||
'localization': 'Localization',
|
||||
'object': 'Object',
|
||||
'operator': 'Operator',
|
||||
}
|
||||
for param, atal_param in string_params.items():
|
||||
if param in post_data:
|
||||
data[atal_param] = post_data[param]
|
||||
|
||||
resp_data = self._call('api/WorksRequests', method='post', json_data=data)
|
||||
resp_data['RequestStateLabel'] = STATUS_MAP.get(resp_data.get('RequestState', ''), '')
|
||||
return {'data': resp_data}
|
||||
|
||||
@endpoint(
|
||||
description=_('Add an attachment to a works requests'),
|
||||
name='worksrequests-single-attachment',
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
'application/json': SINGLE_ATTACHMENT_SCHEMA,
|
||||
}
|
||||
},
|
||||
'input_example': {
|
||||
'file': {
|
||||
'filename': 'example-1.pdf',
|
||||
'content_type': 'application/pdf',
|
||||
'content': 'JVBERi0xL...(base64 PDF)...',
|
||||
},
|
||||
},
|
||||
},
|
||||
parameters={
|
||||
'worksrequests_id': {
|
||||
'example_value': '1',
|
||||
}
|
||||
},
|
||||
)
|
||||
def worksrequests_single_attachment(self, request, worksrequests_id, post_data):
|
||||
if not post_data['file']:
|
||||
return {}
|
||||
try:
|
||||
content = base64.b64decode(post_data['file']['content'])
|
||||
except (TypeError, binascii.Error):
|
||||
raise APIError('Invalid file content')
|
||||
|
||||
files = {
|
||||
'File': (
|
||||
post_data['file'].get('filename', ''),
|
||||
io.BytesIO(content).read(),
|
||||
post_data['file'].get('content_type', ''),
|
||||
)
|
||||
}
|
||||
# return nothing if successful
|
||||
self._call(
|
||||
'api/WorksRequests/%s/Attachments' % worksrequests_id,
|
||||
method='post',
|
||||
files=files,
|
||||
return_response=True,
|
||||
)
|
||||
return {}
|
||||
|
||||
@endpoint(
|
||||
description=_('Add attachments to multiple works requests'),
|
||||
name='worksrequests-attachments',
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
'application/json': ATTACHMENTS_SCHEMA,
|
||||
}
|
||||
},
|
||||
'input_example': {
|
||||
'files/0': {
|
||||
'filename': 'example-1.pdf',
|
||||
'content_type': 'application/pdf',
|
||||
'content': 'JVBERi0xL...(base64 PDF)...',
|
||||
},
|
||||
'files/1': {
|
||||
'filename': 'example-2.pdf',
|
||||
'content_type': 'application/pdf',
|
||||
'content': 'JVBERi0xL...(base64 PDF)...',
|
||||
},
|
||||
'worksrequests_ids/0': '1',
|
||||
'worksrequests_ids/1': '2',
|
||||
},
|
||||
},
|
||||
)
|
||||
def worksrequests_attachments(self, request, post_data):
|
||||
files = []
|
||||
for file_ in post_data.get('files', []):
|
||||
if not file_:
|
||||
continue
|
||||
try:
|
||||
content = base64.b64decode(file_['content'])
|
||||
except (TypeError, binascii.Error):
|
||||
raise APIError('Invalid file content')
|
||||
files.append(
|
||||
(
|
||||
'Files',
|
||||
(
|
||||
file_.get('filename', ''),
|
||||
io.BytesIO(content).read(),
|
||||
file_.get('content_type', ''),
|
||||
),
|
||||
)
|
||||
)
|
||||
if not files:
|
||||
return {}
|
||||
data = {'Ids': post_data['worksrequests_ids']}
|
||||
# return nothing if successful
|
||||
self._call(
|
||||
'api/WorksRequests/Attachments',
|
||||
method='post',
|
||||
files=files,
|
||||
data=data,
|
||||
return_response=True,
|
||||
)
|
||||
return {}
|
||||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
name='worksrequest-status',
|
||||
description=_('Get the status of a works request'),
|
||||
parameters={
|
||||
'worksrequests_id': {
|
||||
'example_value': '1',
|
||||
},
|
||||
'filter_responses': {
|
||||
'example_value': '501,507',
|
||||
},
|
||||
},
|
||||
)
|
||||
def worksrequest_status(self, request, worksrequests_id, filter_responses=None):
|
||||
filter_responses = (
|
||||
[type_id.strip() for type_id in filter_responses.split(',') if type_id.strip()]
|
||||
if filter_responses
|
||||
else []
|
||||
)
|
||||
action_type_ids = []
|
||||
for type_id in filter_responses:
|
||||
try:
|
||||
action_type_ids.append(int(type_id))
|
||||
except ValueError:
|
||||
raise APIError('filter_responses must be a list of integer')
|
||||
|
||||
resp_data = self._call('api/WorksRequests/%s' % worksrequests_id, params={'$expand': 'Responses'})
|
||||
resp_data['RequestStateLabel'] = STATUS_MAP.get(resp_data.get('RequestState', ''), '')
|
||||
if action_type_ids:
|
||||
responses = resp_data.pop('Responses', [])
|
||||
resp_data['Responses'] = [
|
||||
resp for resp in responses if resp.get('ActionTypeId') in action_type_ids
|
||||
] or []
|
||||
return {'data': resp_data}
|
||||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
name='worksrequest-intervention-status',
|
||||
description=_('Get the status of a works request intervention'),
|
||||
parameters={
|
||||
'number': {
|
||||
'example_value': 'DIT23070011',
|
||||
}
|
||||
},
|
||||
)
|
||||
def worksrequest_intervention_status(self, request, number):
|
||||
resp_data = self._call('/api/WorksRequests/GetInterventionStates', params={'number': number})
|
||||
resp_data = resp_data[0] if resp_data else {}
|
||||
resp_data['WorkStateLabel'] = INTERVENTION_STATUS_MAP.get(resp_data.get('WorkState', ''), '')
|
||||
return {'data': resp_data}
|
|
@ -199,6 +199,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
name='link',
|
||||
methods=['post'],
|
||||
description=_('Create link with an extranet account'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -232,6 +233,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
name='unlink',
|
||||
methods=['post'],
|
||||
description=_('Delete link with an extranet account'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -290,6 +292,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='dossiers',
|
||||
description=_('Get datas for all links'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -369,6 +372,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='search',
|
||||
description=_('Search for beneficiaries'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'first_name': {
|
||||
'description': _('Beneficiary first name'),
|
||||
|
@ -502,6 +506,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
name='link-by-id-per',
|
||||
methods=['post'],
|
||||
description=_('Create link with an extranet account'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -521,6 +526,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='dossier-by-pair',
|
||||
description=_('Get dossier data with two integers'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'p1': {
|
||||
'description': _('First integer'),
|
||||
|
|
|
@ -91,7 +91,6 @@ class Migration(migrations.Migration):
|
|||
blank=True,
|
||||
max_length=600,
|
||||
verbose_name='Postal codes or department number to get streets, separated with commas',
|
||||
help_text='This parameter is only useful for the /streets/ endpoint (very rarely used)',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
|
|
|
@ -6,19 +6,19 @@ from django.db import migrations, models
|
|||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('base_adresse', '0018_text_to_jsonb'),
|
||||
("base_adresse", "0018_text_to_jsonb"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='streetmodel',
|
||||
name='resource',
|
||||
model_name="streetmodel",
|
||||
name="resource",
|
||||
field=models.ForeignKey(
|
||||
default=None,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to='base_adresse.BaseAdresse',
|
||||
verbose_name='BAN Connector',
|
||||
to="base_adresse.BaseAdresse",
|
||||
verbose_name="BAN Connector",
|
||||
),
|
||||
),
|
||||
]
|
||||
|
|
|
@ -4,8 +4,8 @@ from django.db import migrations
|
|||
|
||||
|
||||
def set_streetmodel_resource(apps, schema_editor):
|
||||
BaseAdresse = apps.get_model('base_adresse', 'BaseAdresse')
|
||||
StreetModel = apps.get_model('base_adresse', 'StreetModel')
|
||||
BaseAdresse = apps.get_model("base_adresse", "BaseAdresse")
|
||||
StreetModel = apps.get_model("base_adresse", "StreetModel")
|
||||
if BaseAdresse.objects.exists():
|
||||
StreetModel.objects.update(resource=BaseAdresse.objects.first())
|
||||
else:
|
||||
|
@ -14,7 +14,7 @@ def set_streetmodel_resource(apps, schema_editor):
|
|||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('base_adresse', '0019_streetmodel_resource_add'),
|
||||
("base_adresse", "0019_streetmodel_resource_add"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
|
|
|
@ -6,17 +6,17 @@ from django.db import migrations, models
|
|||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('base_adresse', '0020_streetmodel_resource_runpython'),
|
||||
("base_adresse", "0020_streetmodel_resource_runpython"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='streetmodel',
|
||||
name='resource',
|
||||
model_name="streetmodel",
|
||||
name="resource",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to='base_adresse.BaseAdresse',
|
||||
verbose_name='BAN Connector',
|
||||
to="base_adresse.BaseAdresse",
|
||||
verbose_name="BAN Connector",
|
||||
),
|
||||
),
|
||||
]
|
||||
|
|
|
@ -4,11 +4,11 @@ from django.db import migrations
|
|||
|
||||
|
||||
def set_resource(apps, schema_editor):
|
||||
BaseAdresse = apps.get_model('base_adresse', 'BaseAdresse')
|
||||
RegionModel = apps.get_model('base_adresse', 'RegionModel')
|
||||
DepartmentModel = apps.get_model('base_adresse', 'DepartmentModel')
|
||||
CityModel = apps.get_model('base_adresse', 'CityModel')
|
||||
AddressCacheModel = apps.get_model('base_adresse', 'AddressCacheModel')
|
||||
BaseAdresse = apps.get_model("base_adresse", "BaseAdresse")
|
||||
RegionModel = apps.get_model("base_adresse", "RegionModel")
|
||||
DepartmentModel = apps.get_model("base_adresse", "DepartmentModel")
|
||||
CityModel = apps.get_model("base_adresse", "CityModel")
|
||||
AddressCacheModel = apps.get_model("base_adresse", "AddressCacheModel")
|
||||
if BaseAdresse.objects.exists():
|
||||
resource = BaseAdresse.objects.first()
|
||||
RegionModel.objects.update(resource=resource)
|
||||
|
|
|
@ -1,67 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-11-29 18:06
|
||||
|
||||
import django.contrib.postgres.indexes
|
||||
import django.db.models.functions.text
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('base_adresse', '0030_auto_20220627_1511'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
[
|
||||
'CREATE EXTENSION IF NOT EXISTS pg_trgm WITH SCHEMA public',
|
||||
],
|
||||
reverse_sql=['DROP EXTENSION IF EXISTS pg_trgm'],
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='citymodel',
|
||||
index=models.Index(fields=['code'], name='base_adress_code_e169d0_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='citymodel',
|
||||
index=models.Index(fields=['zipcode'], name='base_adress_zipcode_79aa6f_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='citymodel',
|
||||
index=models.Index(
|
||||
django.db.models.functions.text.Upper('unaccent_name'), name='base_adresse_city_name_idx'
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='departmentmodel',
|
||||
index=models.Index(
|
||||
django.db.models.functions.text.Upper('unaccent_name'), name='base_adresse_dept_name_idx'
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='regionmodel',
|
||||
index=models.Index(
|
||||
django.db.models.functions.text.Upper('unaccent_name'), name='base_adresse_region_name_idx'
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='streetmodel',
|
||||
index=models.Index(fields=['ban_id'], name='base_adress_ban_id_2c35ab_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='streetmodel',
|
||||
index=models.Index(fields=['zipcode'], name='base_adress_zipcode_bf7091_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='streetmodel',
|
||||
index=models.Index(fields=['citycode'], name='base_adress_citycod_428b79_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='streetmodel',
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
django.contrib.postgres.indexes.OpClass(
|
||||
django.db.models.functions.text.Upper('unaccent_name'), 'public.gin_trgm_ops'
|
||||
),
|
||||
name='base_adresse_street_name_idx',
|
||||
),
|
||||
),
|
||||
]
|
|
@ -5,11 +5,9 @@ import json
|
|||
from io import StringIO
|
||||
from urllib import parse as urlparse
|
||||
|
||||
from django.contrib.postgres import indexes as postgresql_indexes
|
||||
from django.core.exceptions import FieldError
|
||||
from django.db import connection, models
|
||||
from django.db.models import JSONField, Q
|
||||
from django.db.models.functions import Upper
|
||||
from django.utils import timezone
|
||||
from django.utils.http import urlencode
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
@ -46,6 +44,12 @@ class BaseAdresse(AddressResource):
|
|||
'<a href="https://api.gouv.fr/api/api-geo.html">API Geo</a>.'
|
||||
)
|
||||
|
||||
zipcode = models.CharField(
|
||||
max_length=600,
|
||||
blank=True,
|
||||
verbose_name=_('Postal codes or department number to get streets, separated with commas'),
|
||||
)
|
||||
|
||||
latitude = models.FloatField(
|
||||
null=True,
|
||||
blank=True,
|
||||
|
@ -59,13 +63,6 @@ class BaseAdresse(AddressResource):
|
|||
help_text=_('Geographic priority for /addresses/ endpoint.'),
|
||||
)
|
||||
|
||||
zipcode = models.CharField(
|
||||
max_length=600,
|
||||
blank=True,
|
||||
verbose_name=_('Postal codes or department number to get streets, separated with commas'),
|
||||
help_text=_('This parameter is only useful for the /streets/ endpoint (very rarely used)'),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Base Adresse Web Service')
|
||||
|
||||
|
@ -112,7 +109,6 @@ class BaseAdresse(AddressResource):
|
|||
@endpoint(
|
||||
pattern='(?P<q>.+)?$',
|
||||
description=_('Addresses list'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Address identifier')},
|
||||
'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'},
|
||||
|
@ -129,24 +125,10 @@ class BaseAdresse(AddressResource):
|
|||
'Prioritize results according to coordinates. "lat" parameter must also be present.'
|
||||
)
|
||||
},
|
||||
'type': {
|
||||
'description': _(
|
||||
'Type of address to return, housenumber, street, locality, municipality or all. Default is all.'
|
||||
)
|
||||
},
|
||||
},
|
||||
)
|
||||
def addresses(
|
||||
self,
|
||||
request,
|
||||
id=None,
|
||||
q=None,
|
||||
zipcode='',
|
||||
citycode=None,
|
||||
lat=None,
|
||||
lon=None,
|
||||
page_limit=5,
|
||||
type=None,
|
||||
self, request, id=None, q=None, zipcode='', citycode=None, lat=None, lon=None, page_limit=5
|
||||
):
|
||||
if id is not None:
|
||||
return self.get_by_id(request, id=id, citycode=citycode)
|
||||
|
@ -174,8 +156,6 @@ class BaseAdresse(AddressResource):
|
|||
if self.latitude and self.longitude or lat and lon:
|
||||
query_args['lat'] = lat or self.latitude
|
||||
query_args['lon'] = lon or self.longitude
|
||||
if type in ('housenumber', 'street', 'locality', 'municipality'):
|
||||
query_args['type'] = type
|
||||
query = urlencode(query_args)
|
||||
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
|
||||
|
||||
|
@ -187,8 +167,7 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
result = []
|
||||
|
||||
features = result_response.json().get('features')
|
||||
for feature in features:
|
||||
for feature in result_response.json().get('features'):
|
||||
if not feature['geometry']['type'] == 'Point':
|
||||
continue # skip unknown
|
||||
data = self.format_address_data(feature)
|
||||
|
@ -198,6 +177,7 @@ class BaseAdresse(AddressResource):
|
|||
)
|
||||
if not created:
|
||||
address.update_timestamp()
|
||||
|
||||
return {'data': result}
|
||||
|
||||
def get_by_id(self, request, id, citycode=None):
|
||||
|
@ -242,7 +222,6 @@ class BaseAdresse(AddressResource):
|
|||
@endpoint(
|
||||
pattern='(?P<q>.+)?$',
|
||||
description=_('Geocoding (Nominatim API)'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'},
|
||||
'zipcode': {'description': _('Zipcode')},
|
||||
|
@ -257,51 +236,30 @@ class BaseAdresse(AddressResource):
|
|||
'Prioritize results according to coordinates. "lon" parameter must be present.'
|
||||
)
|
||||
},
|
||||
'type': {
|
||||
'description': _(
|
||||
'Type of address to return, housenumber, street, locality, municipality or all. Default is all.'
|
||||
)
|
||||
},
|
||||
},
|
||||
)
|
||||
def search(self, request, q, zipcode='', citycode=None, lat=None, lon=None, type=None, **kwargs):
|
||||
def search(self, request, q, zipcode='', citycode=None, lat=None, lon=None, **kwargs):
|
||||
if kwargs.get('format', 'json') != 'json':
|
||||
raise NotImplementedError()
|
||||
result = self.addresses(
|
||||
request,
|
||||
q=q,
|
||||
zipcode=zipcode,
|
||||
citycode=citycode,
|
||||
lat=lat,
|
||||
lon=lon,
|
||||
page_limit=1,
|
||||
type=type,
|
||||
request, q=q, zipcode=zipcode, citycode=citycode, lat=lat, lon=lon, page_limit=1
|
||||
)
|
||||
return result['data']
|
||||
|
||||
@endpoint(
|
||||
description=_('Reverse geocoding'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'lat': {'description': _('Latitude'), 'example_value': 48.833708},
|
||||
'lon': {'description': _('Longitude'), 'example_value': 2.323349},
|
||||
'type': {
|
||||
'description': _(
|
||||
'Type of address to return, housenumber, street, locality, municipality or all. Default is all.'
|
||||
)
|
||||
},
|
||||
},
|
||||
)
|
||||
def reverse(self, request, lat, lon, type=None, **kwargs):
|
||||
def reverse(self, request, lat, lon, **kwargs):
|
||||
if kwargs.get('format', 'json') != 'json':
|
||||
raise NotImplementedError()
|
||||
|
||||
scheme, netloc, path, params, query, fragment = urlparse.urlparse(self.service_url)
|
||||
path = urlparse.urljoin(path, 'reverse/')
|
||||
query_dict = {'lat': lat, 'lon': lon}
|
||||
if type in ('housenumber', 'street', 'locality', 'municipality'):
|
||||
query_dict['type'] = type
|
||||
query = urlencode(query_dict)
|
||||
query = urlencode({'lat': lat, 'lon': lon})
|
||||
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
|
||||
|
||||
try:
|
||||
|
@ -325,10 +283,9 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Streets from zipcode'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Street identifier')},
|
||||
'q': {'description': _('Street name')},
|
||||
'q': {'description': _("Street name")},
|
||||
'zipcode': {'description': _('Zipcode')},
|
||||
'citycode': {'description': _('INSEE City code')},
|
||||
'page_limit': {'description': _('Maximum number of results to return'), 'example_value': 30},
|
||||
|
@ -379,13 +336,12 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Cities list'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {
|
||||
'description': _('Get exactly one city using its code and postal code separated with a dot'),
|
||||
'example_value': '75056.75014',
|
||||
},
|
||||
'q': {'description': _('Search text in name or postal code'), 'example_value': 'Paris'},
|
||||
'q': {'description': _("Search text in name or postal code"), 'example_value': 'Paris'},
|
||||
'code': {
|
||||
'description': _('INSEE code (or multiple codes separated with commas)'),
|
||||
'example_value': '75056',
|
||||
|
@ -441,7 +397,6 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Departments list'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Get exactly one department using its code'), 'example_value': '59'},
|
||||
'q': {'description': _('Search text in name or code'), 'example_value': 'Nord'},
|
||||
|
@ -466,7 +421,6 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Regions list'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Get exactly one region using its code'), 'example_value': '32'},
|
||||
'q': {'description': _('Search text in name or code'), 'example_value': 'Hauts-de-France'},
|
||||
|
@ -737,15 +691,6 @@ class StreetModel(UnaccentNameMixin, models.Model):
|
|||
|
||||
class Meta:
|
||||
ordering = ['unaccent_name', 'name']
|
||||
indexes = [
|
||||
models.Index(fields=['ban_id']),
|
||||
models.Index(fields=['zipcode']),
|
||||
models.Index(fields=['citycode']),
|
||||
postgresql_indexes.GinIndex(
|
||||
postgresql_indexes.OpClass(Upper('unaccent_name'), 'public.gin_trgm_ops'),
|
||||
name='%(app_label)s_street_name_idx',
|
||||
),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
@ -770,9 +715,6 @@ class RegionModel(UnaccentNameMixin, models.Model):
|
|||
class Meta:
|
||||
ordering = ['code']
|
||||
unique_together = ('resource', 'code')
|
||||
indexes = [
|
||||
models.Index(Upper('unaccent_name'), name='%(app_label)s_region_name_idx'),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return '%s %s' % (self.code, self.name)
|
||||
|
@ -800,9 +742,6 @@ class DepartmentModel(UnaccentNameMixin, models.Model):
|
|||
class Meta:
|
||||
ordering = ['code']
|
||||
unique_together = ('resource', 'code')
|
||||
indexes = [
|
||||
models.Index(Upper('unaccent_name'), name='%(app_label)s_dept_name_idx'),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return '%s %s' % (self.code, self.name)
|
||||
|
@ -826,9 +765,7 @@ class CityModel(UnaccentNameMixin, models.Model):
|
|||
'id': '%s.%s' % (self.code, self.zipcode),
|
||||
'code': self.code,
|
||||
'name': self.name,
|
||||
'city': self.name,
|
||||
'zipcode': self.zipcode,
|
||||
'postcode': self.zipcode,
|
||||
'population': self.population,
|
||||
'department_code': self.department.code if self.department else None,
|
||||
'department_name': self.department.name if self.department else None,
|
||||
|
@ -840,11 +777,6 @@ class CityModel(UnaccentNameMixin, models.Model):
|
|||
class Meta:
|
||||
ordering = ['-population', 'zipcode', 'unaccent_name', 'name']
|
||||
unique_together = ('resource', 'code', 'zipcode')
|
||||
indexes = [
|
||||
models.Index(fields=['code']),
|
||||
models.Index(fields=['zipcode']),
|
||||
models.Index(Upper('unaccent_name'), name='%(app_label)s_city_name_idx'),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return '%s %s' % (self.zipcode, self.name)
|
||||
|
|
|
@ -68,6 +68,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['post'],
|
||||
name='meeting',
|
||||
perm='can_access',
|
||||
description_post=_('Create a meeting'),
|
||||
post={
|
||||
'request_body': {
|
||||
|
@ -145,6 +146,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['get', 'delete'],
|
||||
name='meeting',
|
||||
perm='can_access',
|
||||
pattern=r'^(?P<guid>[0-9a-f]{32})/?$',
|
||||
example_pattern='{guid}/',
|
||||
description_post=_('Get a meeting'),
|
||||
|
@ -172,7 +174,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['get'],
|
||||
name='meeting',
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<guid>[0-9a-f]{32})/is-running/?$',
|
||||
example_pattern='{guid}/is-running/',
|
||||
description_post=_('Report if meeting is running'),
|
||||
|
@ -195,7 +196,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['get'],
|
||||
name='meeting',
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<guid>[0-9a-f]{32})/join/agent/(?P<key>[^/]*)/?$',
|
||||
example_pattern='{guid}/join/agent/',
|
||||
description_post=_('Get a meeting'),
|
||||
|
@ -223,7 +223,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['get'],
|
||||
name='meeting',
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<guid>[0-9a-f]{32})/join/user/(?P<key>[^/]*)/?$',
|
||||
example_pattern='{guid}/join/user/',
|
||||
description_post=_('Get a meeting'),
|
||||
|
|
|
@ -1,47 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2024-02-20 15:41
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CalDAV',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'dav_url',
|
||||
models.URLField(
|
||||
help_text='DAV root URL (such as https://test.egw/groupdav.php/)',
|
||||
verbose_name='DAV root URL',
|
||||
),
|
||||
),
|
||||
('dav_login', models.CharField(max_length=128, verbose_name='DAV username')),
|
||||
('dav_password', models.CharField(max_length=512, verbose_name='DAV password')),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_caldav_caldav_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'CalDAV',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -1,367 +0,0 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2024 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import functools
|
||||
import urllib.parse
|
||||
|
||||
import caldav
|
||||
import requests
|
||||
from django.db import models
|
||||
from django.utils.dateparse import parse_date, parse_datetime
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.conversion import exception_to_text
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
EVENT_SCHEMA_PART = {
|
||||
'type': 'object',
|
||||
'description': _('Ical event properties ( VEVENT RFC 5545 3.6.1 )'),
|
||||
'properties': {
|
||||
'DTSTART': {
|
||||
'type': 'string',
|
||||
'description': _('Event start (included) ISO-8601 date-time or date (for allday event)'),
|
||||
},
|
||||
'DTEND': {
|
||||
'type': 'string',
|
||||
'description': _('Event end (excluded) ISO-8601 date-time or date (for allday event)'),
|
||||
},
|
||||
'SUMMARY': {
|
||||
'type': 'string',
|
||||
'description': 'RFC 5545 3.8.1.12',
|
||||
},
|
||||
'DESCRIPTION': {
|
||||
'type': 'string',
|
||||
'description': 'RFC 5545 3.8.2.5',
|
||||
},
|
||||
'LOCATION': {
|
||||
'type': 'string',
|
||||
'description': 'RFC 5545 3.8.1.7',
|
||||
},
|
||||
'CATEGORY': {'type': 'string'},
|
||||
'TRANSP': {
|
||||
'type': 'boolean',
|
||||
'description': _('Transparent if true else opaque (RFC 5545 3.8.2.7)'),
|
||||
},
|
||||
'RRULE': {
|
||||
'description': _('Recurrence rule (RFC 5545 3.8.5.3)'),
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'FREQ': {
|
||||
'type': 'string',
|
||||
'enum': ['WEEKLY', 'MONTHLY', 'YEARLY'],
|
||||
},
|
||||
'BYDAY': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'string',
|
||||
'enum': ['MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU'],
|
||||
},
|
||||
},
|
||||
'BYMONTH': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'integer',
|
||||
'minimum': 1,
|
||||
'maximum': 12,
|
||||
},
|
||||
},
|
||||
'COUNT': {
|
||||
'type': 'integer',
|
||||
'minimum': 1,
|
||||
},
|
||||
'UNTIL': {
|
||||
'type': 'string',
|
||||
'description': _('Date or date and time indicating the end of recurrence'),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
USERNAME_PARAM = {
|
||||
'description': _('The calendar\'s owner username'),
|
||||
'type': 'string',
|
||||
}
|
||||
|
||||
EVENT_UID_PARAM = {
|
||||
'description': _('An event UID'),
|
||||
'type': 'string',
|
||||
}
|
||||
|
||||
|
||||
# Action's request body schema
|
||||
EVENT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': _('Event description schema'),
|
||||
'unflatten': True,
|
||||
**EVENT_SCHEMA_PART,
|
||||
}
|
||||
|
||||
|
||||
def clean_egw_response(response, *args, **kwargs):
|
||||
'''requests hooks that modify requests's responses deleting
|
||||
EGW's SQL log lines when there is some
|
||||
|
||||
SQL log lines are matched by checking that they :
|
||||
- startswith "==> SQL =>"
|
||||
- endswith "<br>"
|
||||
'''
|
||||
response._content = b'\n'.join(
|
||||
line
|
||||
for line in response.content.split(b'\n')
|
||||
if not line.startswith(b'==> SQL =>') or not line.endswith(b'<br>')
|
||||
)
|
||||
return response
|
||||
|
||||
|
||||
class CalDAV(BaseResource):
|
||||
dav_url = models.URLField(
|
||||
blank=False,
|
||||
verbose_name=_('DAV root URL'),
|
||||
help_text=_('DAV root URL (such as https://test.egw/groupdav.php/)'),
|
||||
)
|
||||
dav_login = models.CharField(max_length=128, verbose_name=_('DAV username'), blank=False)
|
||||
dav_password = models.CharField(max_length=512, verbose_name=_('DAV password'), blank=False)
|
||||
|
||||
category = _('Misc')
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('CalDAV')
|
||||
|
||||
@functools.cached_property
|
||||
def dav_client(self):
|
||||
'''Instanciate a caldav.DAVCLient and return the instance'''
|
||||
client = caldav.DAVClient(self.dav_url, username=self.dav_login, password=self.dav_password)
|
||||
# Replace DAVClient.session requests.Session instance by our
|
||||
# own requests session in order to log DAV interactions
|
||||
client.session = self.requests
|
||||
# adds EGW response cleaning hook
|
||||
self.requests.hooks['response'] = clean_egw_response
|
||||
return client
|
||||
|
||||
def check_status(self):
|
||||
'''Attempt a propfind on DAV root URL'''
|
||||
try:
|
||||
rep = self.dav_client.propfind()
|
||||
rep.find_objects_and_props()
|
||||
except caldav.lib.error.AuthorizationError:
|
||||
raise Exception(_('Not authorized: bad login/password ?'))
|
||||
|
||||
@endpoint(
|
||||
name='event',
|
||||
pattern='^create$',
|
||||
example_pattern='create',
|
||||
methods=['post'],
|
||||
post={'request_body': {'schema': {'application/json': EVENT_SCHEMA}}},
|
||||
parameters={
|
||||
'username': USERNAME_PARAM,
|
||||
},
|
||||
)
|
||||
def create_event(self, request, username, post_data):
|
||||
'''Event creation endpoint'''
|
||||
cal = self.get_calendar(username)
|
||||
self._process_event_properties(post_data)
|
||||
|
||||
# Sequence is auto-incremented when saved, -1 will lead to the
|
||||
# expected SEQUENCE:0 when an event is created
|
||||
post_data['SEQUENCE'] = -1
|
||||
try:
|
||||
evt = cal.save_event(**post_data)
|
||||
except requests.exceptions.RequestException as expt:
|
||||
raise APIError(
|
||||
_('Error sending creation request to caldav server'),
|
||||
data={
|
||||
'expt_class': str(type(expt)),
|
||||
'expt': str(expt),
|
||||
'username': username,
|
||||
},
|
||||
)
|
||||
except caldav.lib.error.DAVError as expt:
|
||||
raise APIError(
|
||||
_('Error creating event'),
|
||||
data={'expt_class': str(type(expt)), 'expt': exception_to_text(expt), 'username': username},
|
||||
)
|
||||
return {'data': {'event_id': evt.id}}
|
||||
|
||||
# Patch do not support request_body validation, using post instead
|
||||
@endpoint(
|
||||
name='event',
|
||||
pattern='^update$',
|
||||
example_pattern='update',
|
||||
methods=['post'],
|
||||
post={'request_body': {'schema': {'application/json': EVENT_SCHEMA}}},
|
||||
parameters={
|
||||
'username': USERNAME_PARAM,
|
||||
'event_id': EVENT_UID_PARAM,
|
||||
},
|
||||
)
|
||||
def update_event(self, request, username, event_id, post_data):
|
||||
'''Event update endpoint'''
|
||||
self._process_event_properties(post_data)
|
||||
ical = self.get_event(username, event_id)
|
||||
|
||||
vevent = ical.icalendar_instance.walk('VEVENT')
|
||||
if not len(vevent) == 1:
|
||||
raise APIError(
|
||||
_('Given event (user:%r uid:%r) do not contains VEVENT component') % (username, event_id),
|
||||
data={
|
||||
'username': username,
|
||||
'event_uid': event_id,
|
||||
'VEVENT': str(vevent),
|
||||
},
|
||||
)
|
||||
vevent = vevent[0]
|
||||
# vevent.update(post_data) do not convert values as expected
|
||||
for k, v in post_data.items():
|
||||
vevent.pop(k)
|
||||
vevent.add(k, v)
|
||||
if 'SEQUENCE' not in vevent:
|
||||
# SEQUENCE is auto-incremented when present
|
||||
# here after a 1st modification the SEQUENCE will be 1 (not 0)
|
||||
vevent['SEQUENCE'] = 0
|
||||
try:
|
||||
# do not use ical.save(no_create=True) : no_create fails on some calDAV
|
||||
ical.save()
|
||||
except requests.exceptions.RequestException as expt:
|
||||
raise APIError(
|
||||
_('Error sending update request to caldav server'),
|
||||
data={
|
||||
'expt_class': str(type(expt)),
|
||||
'expt': str(expt),
|
||||
'username': username,
|
||||
'event_id': event_id,
|
||||
},
|
||||
)
|
||||
return {'data': {'event_id': ical.id}}
|
||||
|
||||
@endpoint(
|
||||
name='event',
|
||||
pattern='^delete$',
|
||||
example_pattern='delete',
|
||||
methods=['delete'],
|
||||
parameters={
|
||||
'username': USERNAME_PARAM,
|
||||
'event_id': EVENT_UID_PARAM,
|
||||
},
|
||||
)
|
||||
def delete_event(self, request, username, event_id):
|
||||
ical = self.get_event(username, event_id)
|
||||
try:
|
||||
ical.delete()
|
||||
except requests.exceptions.RequestException as expt:
|
||||
raise APIError(
|
||||
_('Error sending deletion request to caldav server'),
|
||||
data={
|
||||
'expt_class': str(type(expt)),
|
||||
'expt': str(expt),
|
||||
'username': username,
|
||||
'event_id': event_id,
|
||||
},
|
||||
)
|
||||
return {}
|
||||
|
||||
def get_event(self, username, event_uid):
|
||||
'''Fetch an event given a username and an event_uid
|
||||
Arguments:
|
||||
- username: Calendar owner's username
|
||||
- event_uid: The event's UID
|
||||
|
||||
Returns an caldav.Event instance
|
||||
'''
|
||||
event_path = '%s/calendar/%s.ics' % (urllib.parse.quote(username), urllib.parse.quote(str(event_uid)))
|
||||
cal = self.get_calendar(username)
|
||||
try:
|
||||
ical = cal.event_by_url(event_path)
|
||||
except caldav.lib.error.DAVError as expt:
|
||||
raise APIError(
|
||||
_('Unable to get event %r in calendar owned by %r') % (event_uid, username),
|
||||
data={
|
||||
'expt': exception_to_text(expt),
|
||||
'expt_cls': str(type(expt)),
|
||||
'username': username,
|
||||
'event_uid': event_uid,
|
||||
},
|
||||
)
|
||||
except requests.exceptions.RequestException as expt:
|
||||
raise APIError(
|
||||
_('Unable to communicate with caldav server while fetching event'),
|
||||
data={
|
||||
'expt': exception_to_text(expt),
|
||||
'expt_class': str(type(expt)),
|
||||
'username': username,
|
||||
'event_uid': event_uid,
|
||||
},
|
||||
)
|
||||
return ical
|
||||
|
||||
def get_calendar(self, username):
|
||||
'''Given a username returns the associated calendar set
|
||||
Arguments:
|
||||
- username: Calendar owner's username
|
||||
|
||||
Returns A caldav.Calendar instance
|
||||
Note: do not raise any caldav exception before a method trying to make
|
||||
a request is called
|
||||
'''
|
||||
path = '%s/calendar' % urllib.parse.quote(username)
|
||||
calendar = caldav.Calendar(client=self.dav_client, url=path)
|
||||
return calendar
|
||||
|
||||
def _process_event_properties(self, data):
|
||||
'''Handles verification & convertion of event properties
|
||||
@note Modify given data dict inplace
|
||||
'''
|
||||
if 'TRANSP' in data:
|
||||
data['TRANSP'] = 'TRANSPARENT' if data['TRANSP'] else 'OPAQUE'
|
||||
|
||||
if 'CATEGORY' in data:
|
||||
data['CATEGORIES'] = [data.pop('CATEGORY')]
|
||||
|
||||
if 'RRULE' in data and 'UNTIL' in data['RRULE']:
|
||||
try:
|
||||
data['RRULE']['UNTIL'] = self._parse_date_or_datetime(data['RRULE']['UNTIL'])
|
||||
except ValueError:
|
||||
raise APIError(
|
||||
_('Unable to convert field %(name)s=%(value)r: not a valid date nor date-time')
|
||||
% {'name': 'RRULE/UNTIL', 'value': data['RRULE']['UNTIL']},
|
||||
http_status=400,
|
||||
)
|
||||
|
||||
for dt_field in ('DTSTART', 'DTEND'):
|
||||
if dt_field not in data:
|
||||
continue
|
||||
try:
|
||||
data[dt_field] = self._parse_date_or_datetime(data[dt_field])
|
||||
except ValueError:
|
||||
raise APIError(
|
||||
_('Unable to convert field %(name)s=%(value)r: not a valid date nor date-time')
|
||||
% {'name': dt_field, 'value': data[dt_field]},
|
||||
http_status=400,
|
||||
)
|
||||
|
||||
def _parse_date_or_datetime(self, value):
|
||||
try:
|
||||
ret = parse_date(value) or parse_datetime(value)
|
||||
except ValueError:
|
||||
ret = None
|
||||
if not ret:
|
||||
raise ValueError('Invalid value')
|
||||
return ret
|
|
@ -1,87 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2024-02-28 09:13
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Carl',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'basic_auth_username',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication username'
|
||||
),
|
||||
),
|
||||
(
|
||||
'basic_auth_password',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication password'
|
||||
),
|
||||
),
|
||||
(
|
||||
'client_certificate',
|
||||
models.FileField(
|
||||
blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
|
||||
),
|
||||
),
|
||||
(
|
||||
'trusted_certificate_authorities',
|
||||
models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'),
|
||||
),
|
||||
(
|
||||
'verify_cert',
|
||||
models.BooleanField(blank=True, default=True, verbose_name='TLS verify certificates'),
|
||||
),
|
||||
(
|
||||
'http_proxy',
|
||||
models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
|
||||
),
|
||||
(
|
||||
'service_url',
|
||||
models.URLField(
|
||||
help_text='Base webservice URL (such as https://carlsource.server.com/gmaoCS02/',
|
||||
verbose_name='Service URL',
|
||||
),
|
||||
),
|
||||
(
|
||||
'carl_username',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Carl token authentication username'
|
||||
),
|
||||
),
|
||||
(
|
||||
'carl_password',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Carl token authentication password'
|
||||
),
|
||||
),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_carl_carl_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Carl',
|
||||
},
|
||||
),
|
||||
]
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue