Compare commits
1 Commits
main
...
wip/74797-
Author | SHA1 | Date |
---|---|---|
Thomas NOËL | 8d53545b72 |
|
@ -8,5 +8,3 @@ d2c0be039649febded68d9d04f745cd18b2b2e03
|
|||
989fb5271967e8e87fd57837dd6d8cfe932e7ebe
|
||||
# misc: apply djhtml (#69422)
|
||||
6da81964bd91b5656364357ec06776fed3529c8a
|
||||
# misc: apply double-quote-string-fixer (#79788)
|
||||
40142de8d2d9885f7a57f4b0f5ab1a593e13aaca
|
||||
|
|
|
@ -12,7 +12,5 @@ passerelle.egg-info/
|
|||
coverage.xml
|
||||
junit-py*.xml
|
||||
.sass-cache/
|
||||
passerelle/**/static/**/css/style.css
|
||||
passerelle/**/static/**/css/style.css.map
|
||||
node_modules/
|
||||
coverage/
|
||||
passerelle/static/css/style.css
|
||||
passerelle/static/css/style.css.map
|
||||
|
|
|
@ -1,36 +1,32 @@
|
|||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
hooks:
|
||||
- id: double-quote-string-fixer
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.3.1
|
||||
rev: v3.1.0
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: ['--keep-percent-format', '--py39-plus']
|
||||
args: ['--keep-percent-format', '--py37-plus']
|
||||
- repo: https://github.com/adamchainz/django-upgrade
|
||||
rev: 1.13.0
|
||||
rev: 1.10.0
|
||||
hooks:
|
||||
- id: django-upgrade
|
||||
args: ['--target-version', '3.2']
|
||||
args: ['--target-version', '2.2']
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.3.0
|
||||
rev: 22.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
args: ['--target-version', 'py39', '--skip-string-normalization', '--line-length', '110']
|
||||
args: ['--target-version', 'py37', '--skip-string-normalization', '--line-length', '110']
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.12.0
|
||||
hooks:
|
||||
- id: isort
|
||||
args: ['--profile', 'black', '--line-length', '110']
|
||||
- repo: https://github.com/rtts/djhtml
|
||||
rev: '3.0.6'
|
||||
rev: 'v1.5.2'
|
||||
hooks:
|
||||
- id: djhtml
|
||||
args: ['--tabwidth', '2']
|
||||
- repo: https://git.entrouvert.org/pre-commit-debian.git
|
||||
rev: v0.3
|
||||
rev: v0.1
|
||||
hooks:
|
||||
- id: pre-commit-debian
|
||||
|
|
|
@ -6,39 +6,20 @@ pipeline {
|
|||
disableConcurrentBuilds()
|
||||
timeout(time: 30, unit: 'MINUTES')
|
||||
}
|
||||
environment {
|
||||
max = 100000
|
||||
RAND_TEST = "${Math.abs(new Random().nextInt(max+1))}"
|
||||
}
|
||||
stages {
|
||||
stage('Tests (in parallel)') {
|
||||
failFast true
|
||||
parallel {
|
||||
stage('Unit Tests (pytest)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv"
|
||||
}
|
||||
post {
|
||||
always {
|
||||
script {
|
||||
utils = new Utils()
|
||||
utils.publish_coverage('coverage.xml')
|
||||
utils.publish_coverage_native('index.html')
|
||||
utils.publish_pylint('pylint.out')
|
||||
}
|
||||
mergeJunitResults()
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Unit Tests (vitest)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv -e vitest"
|
||||
}
|
||||
}
|
||||
stage('Linter (pylint)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv -e pylint"
|
||||
stage('Unit Tests') {
|
||||
steps {
|
||||
sh 'NUMPROCESSES=6 tox -rv '
|
||||
}
|
||||
post {
|
||||
always {
|
||||
script {
|
||||
utils = new Utils()
|
||||
utils.publish_coverage('coverage.xml')
|
||||
utils.publish_coverage_native('index.html')
|
||||
utils.publish_pylint('pylint.out')
|
||||
}
|
||||
mergeJunitResults()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -54,9 +35,9 @@ pipeline {
|
|||
'''
|
||||
).trim()
|
||||
if (env.GIT_BRANCH == 'main' || env.GIT_BRANCH == 'origin/main') {
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye,bookworm ${SHORT_JOB_NAME}"
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye ${SHORT_JOB_NAME}"
|
||||
} else if (env.GIT_BRANCH.startsWith('hotfix/')) {
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye,bookworm --branch ${env.GIT_BRANCH} --hotfix ${SHORT_JOB_NAME}"
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye --branch ${env.GIT_BRANCH} --hotfix ${SHORT_JOB_NAME}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
17
README
17
README
|
@ -76,7 +76,7 @@ djhtml is used to automatically indent html files, using those parameters:
|
|||
|
||||
django-upgrade is used to automatically upgrade Django syntax, using those parameters:
|
||||
|
||||
django-upgrade --target-version 3.2
|
||||
django-upgrade --target-version 2.2
|
||||
|
||||
There is .pre-commit-config.yaml to use pre-commit to automatically run these tools
|
||||
before commits. (execute `pre-commit install` to install the git hook.)
|
||||
|
@ -126,18 +126,3 @@ django-jsonresponse (https://github.com/jjay/django-jsonresponse)
|
|||
# Files: passerelle/utils/jsonresponse.py
|
||||
# Copyright (c) 2012 Yasha Borevich <j.borevich@gmail.com>
|
||||
# Licensed under the BSD license
|
||||
|
||||
tweetnacl-js (https://github.com/dchest/tweetnacl-js)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/js/nacl.min.js
|
||||
# Copyright: https://github.com/dchest/tweetnacl-js/blob/master/AUTHORS.md
|
||||
# Licensed under the Unlicense license (public domain)
|
||||
|
||||
zxing-browser (https://github.com/zxing-js/browser/)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/js/zxing-browser.min.js
|
||||
# Copyright: (c) 2018 ZXing for JS
|
||||
# Licensed under the MIT license.
|
||||
|
||||
RemixIcon (https://github.com/Remix-Design/RemixIcon)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/img/favicon.ico
|
||||
# Copyright (c) 2020 RemixIcon.com
|
||||
# Licensed under the Apache License Version 2.0
|
||||
|
|
|
@ -13,15 +13,11 @@ Homepage: https://dev.entrouvert.org/projects/passerelle
|
|||
|
||||
Package: python3-passerelle
|
||||
Architecture: all
|
||||
Depends: ghostscript,
|
||||
pdftk,
|
||||
poppler-utils,
|
||||
python3-caldav,
|
||||
Depends: pdftk,
|
||||
python3-cmislib,
|
||||
python3-cryptography,
|
||||
python3-dateutil,
|
||||
python3-distutils,
|
||||
python3-django (>= 2:3.2),
|
||||
python3-django (>= 2:2.2),
|
||||
python3-django-model-utils,
|
||||
python3-feedparser,
|
||||
python3-gadjo,
|
||||
|
@ -45,7 +41,6 @@ Depends: ghostscript,
|
|||
python3-uwsgidecorators,
|
||||
python3-vobject,
|
||||
python3-xmlschema,
|
||||
python3-xmltodict,
|
||||
python3-zeep (>= 3.2),
|
||||
${misc:Depends},
|
||||
${python3:Depends},
|
||||
|
@ -63,9 +58,8 @@ Depends: adduser,
|
|||
uwsgi,
|
||||
uwsgi-plugin-python3,
|
||||
${misc:Depends},
|
||||
Recommends: memcached,
|
||||
nginx,
|
||||
Suggests: postgresql,
|
||||
Breaks: python-passerelle (<<5.75.post9),
|
||||
Replaces: python-passerelle (<<5.75.post9),
|
||||
Recommends: memcached, nginx
|
||||
Suggests: postgresql
|
||||
Breaks: python-passerelle (<<5.75.post9)
|
||||
Replaces: python-passerelle (<<5.75.post9)
|
||||
Description: Uniform access to multiple data sources and services
|
||||
|
|
|
@ -36,11 +36,6 @@ LOGGING['loggers']['paramiko.transport'] = {
|
|||
'propagate': True,
|
||||
}
|
||||
|
||||
# silence pdfrw
|
||||
LOGGING['loggers']['pdfrw'] = {
|
||||
'propagate': False,
|
||||
}
|
||||
|
||||
exec(open('/etc/%s/settings.py' % PROJECT_NAME).read())
|
||||
|
||||
# run additional settings snippets
|
||||
|
|
|
@ -4,7 +4,6 @@ After=network.target postgresql.service
|
|||
Wants=postgresql.service
|
||||
|
||||
[Service]
|
||||
SyslogIdentifier=uwsgi/%p
|
||||
Environment=PASSERELLE_SETTINGS_FILE=/usr/lib/%p/debian_config.py
|
||||
Environment=PASSERELLE_WSGI_TIMEOUT=120
|
||||
Environment=PASSERELLE_WSGI_WORKERS=5
|
||||
|
|
|
@ -18,7 +18,6 @@ spooler-python-import = passerelle.utils.spooler
|
|||
spooler-max-tasks = 20
|
||||
|
||||
# every five minutes
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants every5min
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants availability
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants jobs
|
||||
# hourly
|
||||
|
|
|
@ -2,23 +2,23 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--url', help='Url of a passerelle Caluire Axel connector instance')
|
||||
parser.addoption('--nameid', help='Publik Name ID')
|
||||
parser.addoption('--firstname', help='first name of a user')
|
||||
parser.addoption('--lastname', help='Last name of a user')
|
||||
parser.addoption('--family', help='Family ID')
|
||||
parser.addoption("--url", help="Url of a passerelle Caluire Axel connector instance")
|
||||
parser.addoption("--nameid", help="Publik Name ID")
|
||||
parser.addoption("--firstname", help="first name of a user")
|
||||
parser.addoption("--lastname", help="Last name of a user")
|
||||
parser.addoption("--family", help="Family ID")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption('--url')
|
||||
return request.config.getoption("--url")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def user(request):
|
||||
return {
|
||||
'name_id': request.config.getoption('--nameid'),
|
||||
'first_name': request.config.getoption('--firstname'),
|
||||
'last_name': request.config.getoption('--lastname'),
|
||||
'family': request.config.getoption('--family'),
|
||||
'name_id': request.config.getoption("--nameid"),
|
||||
'first_name': request.config.getoption("--firstname"),
|
||||
'last_name': request.config.getoption("--lastname"),
|
||||
'family': request.config.getoption("--family"),
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ def test_link(conn, user):
|
|||
'NOM': user['last_name'],
|
||||
'PRENOM': user['first_name'],
|
||||
}
|
||||
print('Creating link with the following payload:')
|
||||
print("Creating link with the following payload:")
|
||||
pprint.pprint(payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -21,7 +21,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('GET family info')
|
||||
print("GET family info")
|
||||
url = conn + '/family_info?NameID=%s' % name_id
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -30,7 +30,7 @@ def test_link(conn, user):
|
|||
assert data['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('GET children info')
|
||||
print("GET children info")
|
||||
url = conn + '/children_info?NameID=%s' % (name_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -40,7 +40,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
|
||||
for child in data['data']['MEMBRE']:
|
||||
print('GET child info')
|
||||
print("GET child info")
|
||||
url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDENT'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -49,7 +49,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('and GET school info')
|
||||
print("and GET school info")
|
||||
url = conn + '/child_schooling_info?NameID=%s&idpersonne=%s&schooling_date=%s' % (
|
||||
name_id,
|
||||
child['IDENT'],
|
||||
|
@ -62,7 +62,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('and GET activities info')
|
||||
print("and GET activities info")
|
||||
url = conn + '/child_activities_info?NameID=%s&idpersonne=%s&schooling_date=%s' % (
|
||||
name_id,
|
||||
child['IDENT'],
|
||||
|
@ -75,7 +75,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print('GET school list')
|
||||
print("GET school list")
|
||||
url = conn + '/school_list'
|
||||
payload = {
|
||||
'num': data['data']['RESPONSABLE1']['ADRESSE']['NORUE'],
|
||||
|
@ -92,7 +92,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
return
|
||||
|
||||
print('Deleting link')
|
||||
print("Deleting link")
|
||||
url = conn + '/unlink?NameID=%s' % name_id
|
||||
resp = requests.post(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -5,25 +5,25 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--cmis-connector-url', help='Url of a passerelle CMIS connector instance')
|
||||
parser.addoption('--cmis-endpoint', help='Url of a passerelle CMIS endpoint')
|
||||
parser.addoption('--cmis-username', help='Username for the CMIS endpoint')
|
||||
parser.addoption('--cmis-password', help='Password for the CMIS endpoint')
|
||||
parser.addoption('--preserve-tree', action='store_true', default=False, help='Preserve test directory')
|
||||
parser.addoption("--cmis-connector-url", help="Url of a passerelle CMIS connector instance")
|
||||
parser.addoption("--cmis-endpoint", help="Url of a passerelle CMIS endpoint")
|
||||
parser.addoption("--cmis-username", help="Username for the CMIS endpoint")
|
||||
parser.addoption("--cmis-password", help="Password for the CMIS endpoint")
|
||||
parser.addoption("--preserve-tree", action="store_true", default=False, help="Preserve test directory")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def cmisclient(request):
|
||||
return cmislib.CmisClient(
|
||||
request.config.getoption('--cmis-endpoint'),
|
||||
request.config.getoption('--cmis-username'),
|
||||
request.config.getoption('--cmis-password'),
|
||||
request.config.getoption("--cmis-endpoint"),
|
||||
request.config.getoption("--cmis-username"),
|
||||
request.config.getoption("--cmis-password"),
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def cmis_connector(request):
|
||||
return request.config.getoption('--cmis-connector-url')
|
||||
return request.config.getoption("--cmis-connector-url")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
|
@ -31,6 +31,6 @@ def cmis_tmpdir(cmisclient, request):
|
|||
path = 'test-%s' % random.randint(0, 10000)
|
||||
folder = cmisclient.defaultRepository.rootFolder.createFolder(path)
|
||||
yield folder.properties['cmis:path']
|
||||
preserve_tree = request.config.getoption('--preserve-tree')
|
||||
preserve_tree = request.config.getoption("--preserve-tree")
|
||||
if not preserve_tree:
|
||||
folder.deleteTree()
|
||||
|
|
|
@ -10,7 +10,7 @@ SPECIAL_CHARS = '!#$%&+-^_`;[]{}+='
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'path,file_name',
|
||||
"path,file_name",
|
||||
[
|
||||
('', 'some.file'),
|
||||
('/toto', 'some.file'),
|
||||
|
@ -31,8 +31,8 @@ def test_uploadfile(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, monkeypatch
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
'path': cmis_tmpdir + path,
|
||||
'file': {'content': file_b64_content, 'filename': file_name, 'content_type': 'image/jpeg'},
|
||||
"path": cmis_tmpdir + path,
|
||||
"file": {"content": file_b64_content, "filename": file_name, "content_type": "image/jpeg"},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -59,8 +59,8 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
'path': cmis_tmpdir + '/uploadconflict',
|
||||
'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'},
|
||||
"path": cmis_tmpdir + '/uploadconflict',
|
||||
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -70,11 +70,11 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
'path': cmis_tmpdir + '/uploadconflict',
|
||||
'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'},
|
||||
"path": cmis_tmpdir + '/uploadconflict',
|
||||
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
resp_data = response.json()
|
||||
assert resp_data['err'] == 1
|
||||
assert resp_data['err_desc'].startswith('update conflict')
|
||||
assert resp_data['err_desc'].startswith("update conflict")
|
||||
|
|
|
@ -2,9 +2,9 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--url', help='Url of a passerelle Planitech connector instance')
|
||||
parser.addoption("--url", help="Url of a passerelle Planitech connector instance")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption('--url')
|
||||
return request.config.getoption("--url")
|
||||
|
|
|
@ -113,7 +113,7 @@ def test_main(conn):
|
|||
|
||||
|
||||
def call_generic(conn, endpoint):
|
||||
print('%s \n' % endpoint)
|
||||
print("%s \n" % endpoint)
|
||||
url = conn + '/%s' % endpoint
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -2,25 +2,25 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--url', help='Url of a passerelle Toulouse Axel connector instance')
|
||||
parser.addoption('--nameid', help='Publik Name ID')
|
||||
parser.addoption('--firstname', help='first name of a user')
|
||||
parser.addoption('--lastname', help='Last name of a user')
|
||||
parser.addoption('--dob', help='Date of birth of a user')
|
||||
parser.addoption('--dui', help='DUI number')
|
||||
parser.addoption("--url", help="Url of a passerelle Toulouse Axel connector instance")
|
||||
parser.addoption("--nameid", help="Publik Name ID")
|
||||
parser.addoption("--firstname", help="first name of a user")
|
||||
parser.addoption("--lastname", help="Last name of a user")
|
||||
parser.addoption("--dob", help="Date of birth of a user")
|
||||
parser.addoption("--dui", help="DUI number")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption('--url')
|
||||
return request.config.getoption("--url")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def user(request):
|
||||
return {
|
||||
'name_id': request.config.getoption('--nameid'),
|
||||
'first_name': request.config.getoption('--firstname'),
|
||||
'last_name': request.config.getoption('--lastname'),
|
||||
'dob': request.config.getoption('--dob'),
|
||||
'dui': request.config.getoption('--dui'),
|
||||
'name_id': request.config.getoption("--nameid"),
|
||||
'first_name': request.config.getoption("--firstname"),
|
||||
'last_name': request.config.getoption("--lastname"),
|
||||
'dob': request.config.getoption("--dob"),
|
||||
'dui': request.config.getoption("--dui"),
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@ import requests
|
|||
|
||||
|
||||
def test_link(conn, user):
|
||||
print('Get update management dates')
|
||||
print("Get update management dates")
|
||||
url = conn + '/management_dates'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -21,7 +21,7 @@ def test_link(conn, user):
|
|||
'PRENOM': user['first_name'],
|
||||
'NAISSANCE': user['dob'],
|
||||
}
|
||||
print('Creating link with the following payload:')
|
||||
print("Creating link with the following payload:")
|
||||
pprint.pprint(payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -30,7 +30,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print('GET family info')
|
||||
print("GET family info")
|
||||
url = conn + '/family_info?NameID=%s' % name_id
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -158,7 +158,7 @@ def test_link(conn, user):
|
|||
for key in flags:
|
||||
payload[key] = True
|
||||
|
||||
print('Update family info with the following payload:')
|
||||
print("Update family info with the following payload:")
|
||||
pprint.pprint(payload)
|
||||
url = conn + '/update_family_info?NameID=%s' % name_id
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -168,7 +168,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print('GET children info')
|
||||
print("GET children info")
|
||||
url = conn + '/children_info?NameID=%s' % (name_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -178,7 +178,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
|
||||
for child in data['data']['ENFANT']:
|
||||
print('GET child info')
|
||||
print("GET child info")
|
||||
url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -187,7 +187,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print('GET child contact info')
|
||||
print("GET child contact info")
|
||||
url = conn + '/child_contacts_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -196,7 +196,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print('Deleting link')
|
||||
print("Deleting link")
|
||||
url = conn + '/unlink?NameID=%s' % name_id
|
||||
resp = requests.post(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -15,39 +15,27 @@ from zeep.helpers import serialize_object
|
|||
FAMILY_PAYLOAD = {
|
||||
'category': 'BI',
|
||||
'situation': 'MARI',
|
||||
'nbChild': '3',
|
||||
'nbTotalChild': '4',
|
||||
'nbAES': '1',
|
||||
'rl1': {
|
||||
'civility': 'MME',
|
||||
'firstname': 'Marge',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'maidenName': 'Bouvier',
|
||||
'quality': 'MERE',
|
||||
'birth': {
|
||||
'dateBirth': '1950-10-01',
|
||||
'countryCode': '404',
|
||||
},
|
||||
'birth': {'dateBirth': '1950-10-01'},
|
||||
'adresse': {
|
||||
'idStreet': '2317',
|
||||
'num': '4',
|
||||
'street1': 'requeried having idStreet provided',
|
||||
'town': 'Toulouse',
|
||||
'zipcode': '31400',
|
||||
'town': 'Springfield',
|
||||
'zipcode': '62701',
|
||||
},
|
||||
},
|
||||
'rl2': {
|
||||
'civility': 'MR',
|
||||
'firstname': 'Homer',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'quality': 'PERE',
|
||||
'birth': {
|
||||
'dateBirth': '1956-05-12',
|
||||
'place': 'Brive-la-Gaillarde',
|
||||
'communeCode': '19031',
|
||||
'cdDepartment': '19',
|
||||
'countryCode': '',
|
||||
},
|
||||
'birth': {'dateBirth': '1956-05-12'},
|
||||
'adresse': {
|
||||
'num': '742',
|
||||
'numComp': None,
|
||||
|
@ -96,14 +84,8 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'M',
|
||||
'firstname': 'Bart',
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {
|
||||
'dateBirth': '2014-04-01',
|
||||
'place': 'Brive-la-Gaillarde',
|
||||
'communeCode': '19031',
|
||||
'cdDepartment': '19',
|
||||
'countryCode': '',
|
||||
},
|
||||
'lastname': 'Simpson',
|
||||
'birth': {'dateBirth': '2014-04-01'},
|
||||
'bPhoto': True,
|
||||
'bLeaveAlone': True,
|
||||
'dietcode': 'MENU_AV',
|
||||
|
@ -133,11 +115,11 @@ FAMILY_PAYLOAD = {
|
|||
'hospital': 'Springfield General Hospital',
|
||||
'vaccinList': [
|
||||
{
|
||||
'code': '8',
|
||||
'code': '45',
|
||||
'vaccinationDate': '2011-01-11',
|
||||
},
|
||||
{
|
||||
'code': '1',
|
||||
'code': '24',
|
||||
'vaccinationDate': '2022-02-22',
|
||||
},
|
||||
],
|
||||
|
@ -158,7 +140,7 @@ FAMILY_PAYLOAD = {
|
|||
'personInfo': {
|
||||
'civility': 'MR',
|
||||
'firstname': 'Abraham Jebediah',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'dateBirth': '1927-05-24',
|
||||
'sexe': 'M',
|
||||
'contact': {
|
||||
|
@ -175,7 +157,7 @@ FAMILY_PAYLOAD = {
|
|||
'personInfo': {
|
||||
'civility': 'MME',
|
||||
'firstname': 'Mona Penelope',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'dateBirth': '1929-03-15',
|
||||
'sexe': 'F',
|
||||
'contact': {
|
||||
|
@ -193,7 +175,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'F',
|
||||
'firstname': 'Lisa',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'birth': {'dateBirth': '2016-05-09'},
|
||||
'dietcode': 'MENU_SV',
|
||||
'paiInfoBean': {
|
||||
|
@ -203,7 +185,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'F',
|
||||
'firstname': 'Maggie',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'birth': {'dateBirth': '2018-12-17'},
|
||||
'dietcode': 'MENU_PAI',
|
||||
'paiInfoBean': {
|
||||
|
@ -213,7 +195,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'M',
|
||||
'firstname': 'Hugo',
|
||||
'lastname': 'Test_Simpson',
|
||||
'lastname': 'Simpson',
|
||||
'birth': {'dateBirth': '2018-04-01'},
|
||||
'dietcode': 'MENU_AV',
|
||||
'paiInfoBean': {
|
||||
|
@ -261,10 +243,7 @@ def pytest_addoption(parser):
|
|||
parser.addoption('--nameid', help='Publik Name ID', default='functest')
|
||||
parser.addoption('--dui', help='DUI number', default='')
|
||||
parser.addoption(
|
||||
'--lastname', help='override lastname to create a new "update" family', default='Test_Simpson'
|
||||
)
|
||||
parser.addoption(
|
||||
'--quick', action='store_true', help='do not reload referentials to speed-up tests', default=False
|
||||
'--lastname', help='override lastname to create a new "update" family', default='Simpson'
|
||||
)
|
||||
|
||||
|
||||
|
@ -340,7 +319,6 @@ def remove_id_on_child(conn, child):
|
|||
del child['indicators'] # order may change
|
||||
child['subscribeSchoolList'] = [] # not managed by test yet
|
||||
child['subscribeActivityList'] = [] # not managed by test yet
|
||||
del child['subscribe_natures'] # order may change
|
||||
|
||||
|
||||
def remove_id_on_rlg(conn, rlg):
|
||||
|
@ -349,11 +327,7 @@ def remove_id_on_rlg(conn, rlg):
|
|||
rlg['lastname'] = 'N/A'
|
||||
remove_extra_indicators(conn, rlg['indicatorList'], 'rl-indicator')
|
||||
rlg['indicatorList'].sort(key=lambda x: x['code'])
|
||||
rlg['quotientList'].sort(key=lambda x: (x['yearRev'], x['dateStart']))
|
||||
del rlg['indicators'] # order may change
|
||||
del rlg['quotients'] # order may change
|
||||
rlg['subscribeActivityList'] = [] # not managed by test yet
|
||||
del rlg['subscribe_natures'] # order may change
|
||||
|
||||
|
||||
def remove_id_on_family(conn, family):
|
||||
|
@ -406,10 +380,7 @@ def conn(request):
|
|||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def referentials(request, conn):
|
||||
quick = request.config.getoption('--quick')
|
||||
if quick:
|
||||
return
|
||||
def referentials(conn):
|
||||
url = urlparse.urlparse(conn)
|
||||
slug = url.path.split('/')[2]
|
||||
cmd = (
|
||||
|
@ -422,10 +393,10 @@ def referentials(request, conn):
|
|||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def create_data(request, conn, reference_year):
|
||||
def create_data(request, conn):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
unlink(conn, name_id)
|
||||
lastname = 'TEST_' + uuid4().hex[0:25]
|
||||
lastname = uuid4().hex[0:30]
|
||||
|
||||
# create family
|
||||
create_family_payload = copy.deepcopy(FAMILY_PAYLOAD)
|
||||
|
@ -449,21 +420,6 @@ def create_data(request, conn, reference_year):
|
|||
resp.raise_for_status()
|
||||
create_result = resp.json()
|
||||
assert create_result['err'] == 0
|
||||
|
||||
# add requiered quotient for subscriptions
|
||||
data = read_family(conn, name_id)
|
||||
url = conn + '/update-quotient?NameID=%s&rl_id=%s' % (name_id, data['RL1']['num'])
|
||||
payload = {
|
||||
'yearRev': str(reference_year),
|
||||
'dateStart': '%s-09-01' % (reference_year),
|
||||
'dateEnd': '3000-08-31',
|
||||
'mtt': '5000.0',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
print('\ncreate DUI: %s' % str(create_result['data']['number']))
|
||||
data = diff_family(conn, name_id, 'test_create_family.json')
|
||||
|
||||
|
@ -474,60 +430,6 @@ def create_data(request, conn, reference_year):
|
|||
'lastname': lastname,
|
||||
'rl1_num': data['RL1']['num'],
|
||||
'bart_num': data['childList'][0]['num'],
|
||||
'maggie_num': data['childList'][1]['num'],
|
||||
'hugo_num': data['childList'][2]['num'],
|
||||
'data': data,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def create_data2(request, conn, reference_year):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
unlink(conn, name_id)
|
||||
lastname = 'TEST_' + uuid4().hex[0:25]
|
||||
|
||||
# create family that is not located into Toulouse
|
||||
create_family_payload = copy.deepcopy(FAMILY_PAYLOAD)
|
||||
create_family_payload['rl1']['lastname'] = lastname
|
||||
create_family_payload['rl1']['adresse'] = create_family_payload['rl2']['adresse']
|
||||
create_family_payload['rl2']['adresse'] = copy.deepcopy(FAMILY_PAYLOAD['rl1']['adresse'])
|
||||
for child in create_family_payload['childList']:
|
||||
child['lastname'] = lastname
|
||||
|
||||
url = conn + '/create-family?NameID=%s' % name_id
|
||||
resp = requests.post(url, json=create_family_payload)
|
||||
resp.raise_for_status()
|
||||
create_result = resp.json()
|
||||
assert create_result['err'] == 0
|
||||
|
||||
# add requiered quotient for subscriptions
|
||||
data = read_family(conn, name_id)
|
||||
url = conn + '/update-quotient?NameID=%s&rl_id=%s' % (name_id, data['RL1']['num'])
|
||||
payload = {
|
||||
'yearRev': str(reference_year),
|
||||
'dateStart': '2023-05-15',
|
||||
'dateEnd': '3000-12-31',
|
||||
'mtt': '5000.0',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
print('\ncreate DUI again: %s' % str(create_result['data']['number']))
|
||||
data = diff_family(conn, name_id, 'test_create_family_out_town.json')
|
||||
|
||||
return {
|
||||
'name_id': name_id, # linked
|
||||
'family_id': str(create_result['data']['number']),
|
||||
'family_payload': create_family_payload,
|
||||
'lastname': lastname,
|
||||
'rl1_num': data['RL1']['num'],
|
||||
'rl2_num': data['RL2']['num'],
|
||||
'bart_num': data['childList'][0]['num'],
|
||||
'lisa_num': data['childList'][1]['num'],
|
||||
'maggie_num': data['childList'][2]['num'],
|
||||
'hugo_num': data['childList'][3]['num'],
|
||||
'data': data,
|
||||
}
|
||||
|
||||
|
@ -599,232 +501,3 @@ def update_data(request, conn):
|
|||
'maggie_num': data['childList'][2]['num'],
|
||||
'data': data,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def reference_year():
|
||||
some_date = datetime.date.today()
|
||||
if some_date.month <= 8:
|
||||
# between january and august, reference year is the year just before
|
||||
return some_date.year - 1
|
||||
return some_date.year
|
||||
|
||||
|
||||
def get_subscription_info(nature, activity_text, unit_text, place_text, con, name_id, person_id, year):
|
||||
def select_item(resp, text):
|
||||
item = None
|
||||
for item in resp.json()['data']:
|
||||
if item['text'] == text:
|
||||
break
|
||||
else:
|
||||
raise Exception("do not find '%s'" % text)
|
||||
return item
|
||||
|
||||
# select activity
|
||||
url = con + '/get-person-activity-list'
|
||||
params = {
|
||||
'NameID': name_id,
|
||||
'person_id': person_id,
|
||||
'start_date': '%s-09-01' % year,
|
||||
'end_date': '%s-08-31' % (year + 1),
|
||||
'nature': nature,
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
activity = select_item(resp, activity_text)
|
||||
|
||||
# select unit
|
||||
url = con + '/get-person-unit-list'
|
||||
params = {
|
||||
'NameID': name_id,
|
||||
'person_id': person_id,
|
||||
'start_date': '%s-09-01' % year,
|
||||
'end_date': '%s-08-31' % (year + 1),
|
||||
'activity_id': activity['id'],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
unit = select_item(resp, unit_text)
|
||||
|
||||
# select place
|
||||
url = con + '/get-person-place-list'
|
||||
params = {
|
||||
'NameID': name_id,
|
||||
'person_id': person_id,
|
||||
'start_date': '%s-09-01' % year,
|
||||
'end_date': '%s-08-31' % (year + 1),
|
||||
'activity_id': activity['id'],
|
||||
'unit_id': unit['id'],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
place = select_item(resp, place_text)
|
||||
assert place['capacityInfo']['controlOK'] is True
|
||||
|
||||
# check subscription info
|
||||
url = con + '/get-person-subscription-info'
|
||||
params = {
|
||||
'NameID': name_id,
|
||||
'person_id': person_id,
|
||||
'activity_id': activity['id'],
|
||||
'unit_id': unit['id'],
|
||||
'place_id': place['id'],
|
||||
'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
info = resp.json()['data']
|
||||
assert info['controlResult']['controlOK'] is True
|
||||
return {
|
||||
'activity': activity,
|
||||
'unit': unit,
|
||||
'place': place,
|
||||
'info': info,
|
||||
}
|
||||
|
||||
|
||||
def get_loisirs_subscribe_info(con, data, year):
|
||||
return get_subscription_info(
|
||||
'LOISIRS',
|
||||
# Sigec made this loisirs activity available for functests
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
|
||||
'MERCREDI - 15h30/17h - 8/15Ans',
|
||||
'ARGOULETS',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_loisirs_subscribe_info3(con, data, year):
|
||||
return get_subscription_info(
|
||||
'LOISIRS',
|
||||
# Sigec made this loisirs activity available for functests
|
||||
'Vitrail Fusing 1/2 Je Adultes',
|
||||
'Inscription annuelle',
|
||||
'Centre Culturel ALBAN MINVILLE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_extrasco_subscribe_info(con, data, year):
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL ELEMENTAIRE Maourine Juin',
|
||||
'PUBLIK ADL ELEMENTAIRE Maourine JUIN 22/23(NE PAS UTILISER)',
|
||||
'MAOURINE (la) ELEMENTAIRE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_extrasco_subscribe_info2(con, data, year):
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL MATERNELLE Lardenne Juin',
|
||||
'PUBLIK ADL MATER JOURNEE AVEC REPAS',
|
||||
'LARDENNE MATERNELLE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_loisirs_subscribe_info(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info2(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_loisirs_subscribe_info(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info3(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_loisirs_subscribe_info3(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_extrasco_subscribe_info(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info2(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_extrasco_subscribe_info2(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info3(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_extrasco_subscribe_info2(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def perisco_subscribe_info(conn, create_data, reference_year):
|
||||
'''This fixture is a configuration trick from Sigec
|
||||
as peri-sco should not be available for subscription
|
||||
and as a consequence, should not be displayed from catalogs'''
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
return get_subscription_info(
|
||||
None,
|
||||
# Sigec made this peri-sco activity available for functests
|
||||
'Temps du midi',
|
||||
'TEST TEMPS DU MIDI 22/23',
|
||||
'AMIDONNIERS ELEMENTAIRE',
|
||||
conn,
|
||||
create_data['name_id'],
|
||||
create_data['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def perisco_subscribe_adulte_info(conn, create_data2, reference_year):
|
||||
'''This fixture is a configuration trick from Sigec
|
||||
as peri-sco should not be available for subscription
|
||||
and as a consequence, should not be displayed from catalogs'''
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
return get_subscription_info(
|
||||
None,
|
||||
# Sigec made this peri-sco activity available for functests
|
||||
'RESTAURATION ADULTE',
|
||||
'TEST RESTAURATION ADULTE 22/23',
|
||||
'DOLTO FRANCOISE MATERNELLE',
|
||||
conn,
|
||||
create_data2['name_id'],
|
||||
create_data2['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
|
|
@ -7,14 +7,6 @@
|
|||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUTO_OUT",
|
||||
"code": "AUTO_OUT",
|
||||
"text": "Autorisation de sortie - CLAE",
|
||||
"libelle": "Autorisation de sortie - CLAE",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUTRE",
|
||||
"code": "AUTRE",
|
||||
|
@ -24,30 +16,6 @@
|
|||
"isActive": true,
|
||||
"note": "rebellious"
|
||||
},
|
||||
{
|
||||
"id": "AUT_OUTADL",
|
||||
"code": "AUT_OUTADL",
|
||||
"text": "Autorisation de sortie - ADL",
|
||||
"libelle": "Autorisation de sortie - ADL",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUT_SANT",
|
||||
"code": "AUT_SANT",
|
||||
"text": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"libelle": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUT_TRANS",
|
||||
"code": "AUT_TRANS",
|
||||
"text": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"libelle": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AVL",
|
||||
"code": "AVL",
|
||||
|
@ -73,14 +41,6 @@
|
|||
"isActive": false,
|
||||
"note": null
|
||||
},
|
||||
{
|
||||
"id": "HPURG",
|
||||
"code": "HPURG",
|
||||
"text": "Hospitalisation / musures d'urgence",
|
||||
"libelle": "Hospitalisation / musures d'urgence",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "LENTILLE",
|
||||
"code": "LENTILLE",
|
||||
|
|
|
@ -7,8 +7,7 @@
|
|||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": null,
|
||||
"bPhoto": false,
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
"category": "BI",
|
||||
"situation": "MARI",
|
||||
"flagCom": true,
|
||||
"nbChild": 3,
|
||||
"nbTotalChild": 4,
|
||||
"nbAES": "1",
|
||||
"nbChild": null,
|
||||
"nbTotalChild": null,
|
||||
"nbAES": null,
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
|
@ -17,9 +17,7 @@
|
|||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
"countryCode": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
|
@ -27,8 +25,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
@ -42,17 +40,7 @@
|
|||
"profession": null,
|
||||
"CAFInfo": null,
|
||||
"indicatorList": [],
|
||||
"quotientList": [
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2022-09-01T00:00:00+02:00",
|
||||
"dateEnd": "3000-08-31T00:00:00+02:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
}
|
||||
],
|
||||
"quotientList": [],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "M\u00e8re"
|
||||
|
@ -67,13 +55,9 @@
|
|||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2014-04-01T00:00:00+02:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
|
@ -82,7 +66,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -104,7 +88,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -160,13 +144,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
@ -202,8 +186,7 @@
|
|||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
|
@ -236,8 +219,7 @@
|
|||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
|
|
|
@ -1,408 +0,0 @@
|
|||
{
|
||||
"number": "N/A",
|
||||
"category": "BI",
|
||||
"situation": "MARI",
|
||||
"flagCom": false,
|
||||
"nbChild": 3,
|
||||
"nbTotalChild": 4,
|
||||
"nbAES": "1",
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
"lastname": "N/A",
|
||||
"maidenName": "BOUVIER",
|
||||
"quality": "MERE",
|
||||
"civility": "MME",
|
||||
"birth": {
|
||||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
"num": 742,
|
||||
"numComp": null,
|
||||
"street1": "Evergreen Terrace",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "90701"
|
||||
},
|
||||
"contact": {
|
||||
"phone": null,
|
||||
"mobile": null,
|
||||
"mail": null,
|
||||
"isContactMail": false,
|
||||
"isContactSms": false,
|
||||
"isInvoicePdf": false
|
||||
},
|
||||
"profession": null,
|
||||
"CAFInfo": null,
|
||||
"indicatorList": [],
|
||||
"quotientList": [
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2023-05-15T00:00:00+02:00",
|
||||
"dateEnd": "3000-12-31T00:00:00+01:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
}
|
||||
],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "M\u00e8re"
|
||||
},
|
||||
"RL2": {
|
||||
"num": "N/A",
|
||||
"firstname": "HOMER",
|
||||
"lastname": "N/A",
|
||||
"maidenName": null,
|
||||
"quality": "PERE",
|
||||
"civility": "MR",
|
||||
"birth": {
|
||||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
"num": 4,
|
||||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
"phone": "0122222222",
|
||||
"mobile": "0622222222",
|
||||
"mail": "homer.simpson@example.org.com",
|
||||
"isContactMail": true,
|
||||
"isContactSms": true,
|
||||
"isInvoicePdf": true
|
||||
},
|
||||
"profession": {
|
||||
"codeCSP": "46",
|
||||
"profession": "Inspecteur de s\u00e9curit\u00e9",
|
||||
"employerName": "Burns",
|
||||
"phone": "0133333333",
|
||||
"addressPro": {
|
||||
"num": null,
|
||||
"street": null,
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
},
|
||||
"situation": null,
|
||||
"weeklyHours": null,
|
||||
"codeCSP_text": "EMPLOYES"
|
||||
},
|
||||
"CAFInfo": {
|
||||
"number": "123",
|
||||
"organ": "GENE",
|
||||
"organ_text": "CAF 31"
|
||||
},
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "AVL",
|
||||
"libelle": "Auxiliaire de Vie loisirs",
|
||||
"note": null,
|
||||
"choice": null,
|
||||
"code_text": "Auxiliaire de Vie loisirs"
|
||||
},
|
||||
{
|
||||
"code": "ETABSPEC",
|
||||
"libelle": "Etablissement sp\u00e9cialis\u00e9",
|
||||
"note": "SNPP",
|
||||
"choice": null,
|
||||
"code_text": "Etablissement sp\u00e9cialis\u00e9"
|
||||
}
|
||||
],
|
||||
"quotientList": [],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MONSIEUR",
|
||||
"quality_text": "P\u00e8re"
|
||||
},
|
||||
"quotientList": [],
|
||||
"childList": [
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "BART",
|
||||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2014-04-01T00:00:00+02:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
"bLeaveAlone": true,
|
||||
"authorizedPersonList": [
|
||||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
"sexe": "M",
|
||||
"contact": {
|
||||
"phone": "0312345678",
|
||||
"mobile": null,
|
||||
"mail": "abe.simpson@example.org"
|
||||
},
|
||||
"civility_text": "MONSIEUR",
|
||||
"sexe_text": "Masculin"
|
||||
},
|
||||
"personQuality": {
|
||||
"code": "13",
|
||||
"libelle": "Famille",
|
||||
"code_text": "Famille"
|
||||
}
|
||||
},
|
||||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
"sexe": "F",
|
||||
"contact": {
|
||||
"phone": "0412345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "mona.simpson@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
},
|
||||
"personQuality": {
|
||||
"code": "13",
|
||||
"libelle": "Famille",
|
||||
"code_text": "Famille"
|
||||
}
|
||||
}
|
||||
],
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "AUTRE",
|
||||
"libelle": "Autre",
|
||||
"note": "rebellious",
|
||||
"choice": null,
|
||||
"code_text": "Autre"
|
||||
},
|
||||
{
|
||||
"code": "LUNETTE",
|
||||
"libelle": "Port de lunettes",
|
||||
"note": null,
|
||||
"choice": null,
|
||||
"code_text": "Port de lunettes"
|
||||
}
|
||||
],
|
||||
"medicalRecord": {
|
||||
"familyDoctor": {
|
||||
"name": "MONROE",
|
||||
"phone": "0612341234",
|
||||
"address": {
|
||||
"street1": "Alameda",
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
}
|
||||
},
|
||||
"allergy1": "butterscotch, imitation butterscotch, glow-in-the-dark monster make-up",
|
||||
"allergy2": "shrimp and cauliflower",
|
||||
"comment1": "the shrimp allergy isn't fully identified",
|
||||
"comment2": null,
|
||||
"observ1": "Ay Caramba!",
|
||||
"observ2": "Eat my shorts!",
|
||||
"isAuthHospital": false,
|
||||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_01",
|
||||
"dateDeb": "2022-09-01T00:00:00+02:00",
|
||||
"dateFin": "2023-07-01T00:00:00+02:00",
|
||||
"description": "mischievous, rebellious, misunderstood, disruptive",
|
||||
"code_text": "PAI Alimentaire Int\u00e9gral"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "Masculin",
|
||||
"dietcode_text": "Avec viande"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "LISA",
|
||||
"sexe": "F",
|
||||
"birth": {
|
||||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_SV",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_02",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Partiel"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "F\u00e9minin",
|
||||
"dietcode_text": "Sans viande"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "MAGGIE",
|
||||
"sexe": "F",
|
||||
"birth": {
|
||||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_02",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Partiel"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "F\u00e9minin",
|
||||
"dietcode_text": "Panier PAI"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "HUGO",
|
||||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_01",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Int\u00e9gral"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "Masculin",
|
||||
"dietcode_text": "Avec viande"
|
||||
}
|
||||
],
|
||||
"emergencyPersonList": [
|
||||
{
|
||||
"numPerson": "N/A",
|
||||
"civility": "MME",
|
||||
"firstname": "PATTY",
|
||||
"lastname": "BOUVIER",
|
||||
"dateBirth": "1948-08-30T00:00:00+01:00",
|
||||
"sexe": "F",
|
||||
"quality": "13",
|
||||
"contact": {
|
||||
"phone": "0112345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "patty.bouvier@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "Famille",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
},
|
||||
{
|
||||
"numPerson": "N/A",
|
||||
"civility": "MME",
|
||||
"firstname": "SELMA",
|
||||
"lastname": "BOUVIER",
|
||||
"dateBirth": "1946-04-29T00:00:00+01:00",
|
||||
"sexe": "F",
|
||||
"quality": "13",
|
||||
"contact": {
|
||||
"phone": "0112345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "selma.bouvier@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "Famille",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
}
|
||||
],
|
||||
"indicatorList": [],
|
||||
"childErrorList": [],
|
||||
"category_text": "BIPARENTALE",
|
||||
"situation_text": "MARIE(E)",
|
||||
"family_id": "N/A"
|
||||
}
|
|
@ -7,13 +7,9 @@
|
|||
"civility": "MR",
|
||||
"birth": {
|
||||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
|
|
@ -1,125 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": "INDI_APE_ENF",
|
||||
"text": "INDI_APE_ENF",
|
||||
"level": "INDI_APE_ENF",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO3",
|
||||
"libelle": "CF-0/1 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HBOTH",
|
||||
"libelle": "SP-handicap parent et fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HPAR",
|
||||
"libelle": "SP-handicap parents",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_MULTIACC",
|
||||
"libelle": "CF-2 enfants \u00e0 accueillir",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_SITUP",
|
||||
"libelle": "SP-situation particuli\u00e8re personne",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "INDI_APE_FAM",
|
||||
"text": "INDI_APE_FAM",
|
||||
"level": "INDI_APE_FAM",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO2",
|
||||
"libelle": "CF-1/2 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_COMPO4",
|
||||
"libelle": "CF-0/2 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_FIRSTC",
|
||||
"libelle": "CF-premier enfant",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HAND",
|
||||
"libelle": "H-handicap ou maladie chronique",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_NAIM",
|
||||
"libelle": "CF-naissance multiple",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "INDI_APE_RES",
|
||||
"text": "INDI_APE_RES",
|
||||
"level": "INDI_APE_RES",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO1",
|
||||
"libelle": "CF-100% actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_FRAT",
|
||||
"libelle": "CF-Fratrie d\u00e9j\u00e0 en accueil",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_KOFRAT",
|
||||
"libelle": "CF-sans proposition pour une partie de la fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HFRAT",
|
||||
"libelle": "SP-handicap fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_SPLOG",
|
||||
"libelle": "SP-situation particuli\u00e8re logement",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_ALLO",
|
||||
"libelle": "SP-accompagnement enfant allophone",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE-MINE",
|
||||
"libelle": "SP-parent mineur",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
|
@ -1,20 +1,4 @@
|
|||
[
|
||||
{
|
||||
"id": "AUT_OUTADL",
|
||||
"code": "AUT_OUTADL",
|
||||
"text": "Autorisation de sortie - ADL",
|
||||
"libelle": "Autorisation de sortie - ADL",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUTO_OUT",
|
||||
"code": "AUTO_OUT",
|
||||
"text": "Autorisation de sortie - CLAE",
|
||||
"libelle": "Autorisation de sortie - CLAE",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUTRE",
|
||||
"code": "AUTRE",
|
||||
|
@ -47,30 +31,6 @@
|
|||
"typeDesc": "NOTE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "HPURG",
|
||||
"code": "HPURG",
|
||||
"text": "Hospitalisation / musures d'urgence",
|
||||
"libelle": "Hospitalisation / musures d'urgence",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUT_SANT",
|
||||
"code": "AUT_SANT",
|
||||
"text": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"libelle": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUT_TRANS",
|
||||
"code": "AUT_TRANS",
|
||||
"text": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"libelle": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "MDPH",
|
||||
"code": "MDPH",
|
||||
|
|
|
@ -1,10 +1,4 @@
|
|||
[
|
||||
{
|
||||
"id": "MORAL",
|
||||
"code": "MORAL",
|
||||
"text": "",
|
||||
"libelle": null
|
||||
},
|
||||
{
|
||||
"id": "MME",
|
||||
"code": "MME",
|
||||
|
|
|
@ -1,11 +1,4 @@
|
|||
[
|
||||
{
|
||||
"id": "87",
|
||||
"code": "87",
|
||||
"rang": "PERSON",
|
||||
"text": "Acte de d\u00e9c\u00e8s",
|
||||
"libelle": "Acte de d\u00e9c\u00e8s"
|
||||
},
|
||||
{
|
||||
"id": "43",
|
||||
"code": "43",
|
||||
|
@ -195,13 +188,6 @@
|
|||
"text": "Certificat de scolarit\u00e9",
|
||||
"libelle": "Certificat de scolarit\u00e9"
|
||||
},
|
||||
{
|
||||
"id": "93",
|
||||
"code": "93",
|
||||
"rang": "PERSON",
|
||||
"text": "Certificat de travail",
|
||||
"libelle": "Certificat de travail"
|
||||
},
|
||||
{
|
||||
"id": "74",
|
||||
"code": "74",
|
||||
|
@ -233,7 +219,7 @@
|
|||
{
|
||||
"id": "37",
|
||||
"code": "37",
|
||||
"rang": "PERSON",
|
||||
"rang": "FAMILY",
|
||||
"text": "D\u00e9claration conjointe sign\u00e9e des parents",
|
||||
"libelle": "D\u00e9claration conjointe sign\u00e9e des parents"
|
||||
},
|
||||
|
@ -275,14 +261,14 @@
|
|||
{
|
||||
"id": "64",
|
||||
"code": "64",
|
||||
"rang": "PERSON",
|
||||
"rang": "FAMILY",
|
||||
"text": "Jugement des affaires familiales",
|
||||
"libelle": "Jugement des affaires familiales"
|
||||
},
|
||||
{
|
||||
"id": "65",
|
||||
"code": "65",
|
||||
"rang": "PERSON",
|
||||
"rang": "FAMILY",
|
||||
"text": "Jugement mise sous tutelle",
|
||||
"libelle": "Jugement mise sous tutelle"
|
||||
},
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": "05DERO-8",
|
||||
"code": "05DERO-8",
|
||||
"text": "DERO05 - SANTE",
|
||||
"libelle": "DERO05 - SANTE"
|
||||
},
|
||||
{
|
||||
"id": "05DERO-6",
|
||||
"code": "05DERO-6",
|
||||
"text": "DERO05 - SANTE : SANTE / ORGANISATION",
|
||||
"libelle": "DERO05 - SANTE : SANTE / ORGANISATION"
|
||||
},
|
||||
{
|
||||
"id": "10DERO-2",
|
||||
"code": "10DERO-2",
|
||||
"text": "DERO10 - ORGANISATION",
|
||||
"libelle": "DERO10 - ORGANISATION"
|
||||
},
|
||||
{
|
||||
"id": "11DERO-1",
|
||||
"code": "11DERO-1",
|
||||
"text": "DERO11 - AUTRE",
|
||||
"libelle": "DERO11 - AUTRE"
|
||||
}
|
||||
]
|
|
@ -1,56 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": 102,
|
||||
"code": 102,
|
||||
"text": "CANTINE / CLAE",
|
||||
"libelle": "CANTINE / CLAE"
|
||||
},
|
||||
{
|
||||
"id": 103,
|
||||
"code": 103,
|
||||
"text": "CCAS",
|
||||
"libelle": "CCAS"
|
||||
},
|
||||
{
|
||||
"id": 101,
|
||||
"code": 101,
|
||||
"text": "DASC",
|
||||
"libelle": "DASC"
|
||||
},
|
||||
{
|
||||
"id": 104,
|
||||
"code": 104,
|
||||
"text": "DSCS",
|
||||
"libelle": "DSCS"
|
||||
},
|
||||
{
|
||||
"id": 105,
|
||||
"code": 105,
|
||||
"text": "ENFANCE LOISIRS",
|
||||
"libelle": "ENFANCE LOISIRS"
|
||||
},
|
||||
{
|
||||
"id": 106,
|
||||
"code": 106,
|
||||
"text": "PARCOURS EDUCATIFS",
|
||||
"libelle": "PARCOURS EDUCATIFS"
|
||||
},
|
||||
{
|
||||
"id": 107,
|
||||
"code": 107,
|
||||
"text": "REMBOURSEMENT",
|
||||
"libelle": "REMBOURSEMENT"
|
||||
},
|
||||
{
|
||||
"id": 108,
|
||||
"code": 108,
|
||||
"text": "SENIORS",
|
||||
"libelle": "SENIORS"
|
||||
},
|
||||
{
|
||||
"id": 109,
|
||||
"code": 109,
|
||||
"text": "SPORT",
|
||||
"libelle": "SPORT"
|
||||
}
|
||||
]
|
|
@ -1,92 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": "CE1",
|
||||
"age": 7,
|
||||
"code": "CE1",
|
||||
"text": "Cours \u00e9l\u00e9mentaire 1",
|
||||
"nature": null,
|
||||
"libelle": "Cours \u00e9l\u00e9mentaire 1",
|
||||
"numOrder": "6",
|
||||
"nextLevelCode": "CE2"
|
||||
},
|
||||
{
|
||||
"id": "CE2",
|
||||
"age": 8,
|
||||
"code": "CE2",
|
||||
"text": "Cours \u00e9l\u00e9mentaire 2",
|
||||
"nature": null,
|
||||
"libelle": "Cours \u00e9l\u00e9mentaire 2",
|
||||
"numOrder": "7",
|
||||
"nextLevelCode": "CM1"
|
||||
},
|
||||
{
|
||||
"id": "CM1",
|
||||
"age": 9,
|
||||
"code": "CM1",
|
||||
"text": "Cours moyen 1",
|
||||
"nature": null,
|
||||
"libelle": "Cours moyen 1",
|
||||
"numOrder": "8",
|
||||
"nextLevelCode": "CM2"
|
||||
},
|
||||
{
|
||||
"id": "CM2",
|
||||
"age": 10,
|
||||
"code": "CM2",
|
||||
"text": "Cours moyen 2",
|
||||
"nature": null,
|
||||
"libelle": "Cours moyen 2",
|
||||
"numOrder": "9",
|
||||
"nextLevelCode": null
|
||||
},
|
||||
{
|
||||
"id": "CP",
|
||||
"age": 6,
|
||||
"code": "CP",
|
||||
"text": "Cours pr\u00e9paratoire",
|
||||
"nature": null,
|
||||
"libelle": "Cours pr\u00e9paratoire",
|
||||
"numOrder": "5",
|
||||
"nextLevelCode": "CE1"
|
||||
},
|
||||
{
|
||||
"id": "GS",
|
||||
"age": 5,
|
||||
"code": "GS",
|
||||
"text": "Section grand",
|
||||
"nature": null,
|
||||
"libelle": "Section grand",
|
||||
"numOrder": "4",
|
||||
"nextLevelCode": "CP"
|
||||
},
|
||||
{
|
||||
"id": "MS",
|
||||
"age": 4,
|
||||
"code": "MS",
|
||||
"text": "Section moyen",
|
||||
"nature": null,
|
||||
"libelle": "Section moyen",
|
||||
"numOrder": "3",
|
||||
"nextLevelCode": "GS"
|
||||
},
|
||||
{
|
||||
"id": "PS",
|
||||
"age": 3,
|
||||
"code": "PS",
|
||||
"text": "Section petit",
|
||||
"nature": null,
|
||||
"libelle": "Section petit",
|
||||
"numOrder": "2",
|
||||
"nextLevelCode": "MS"
|
||||
},
|
||||
{
|
||||
"id": "TPS",
|
||||
"age": 2,
|
||||
"code": "TPS",
|
||||
"text": "Section tout petit",
|
||||
"nature": null,
|
||||
"libelle": "Section tout petit",
|
||||
"numOrder": "1",
|
||||
"nextLevelCode": "PS"
|
||||
}
|
||||
]
|
|
@ -1,20 +0,0 @@
|
|||
[
|
||||
{
|
||||
"id": 2022,
|
||||
"text": "2022",
|
||||
"schoolYear": 2022,
|
||||
"dateEndYearSchool": "2023-07-07T00:00:00+02:00",
|
||||
"dateStartYearSchool": "2022-09-01T00:00:00+02:00",
|
||||
"dateEndSubscribeSchool": "2023-09-01T00:00:00+02:00",
|
||||
"dateStartSubscribeSchool": "2022-09-01T00:00:00+02:00"
|
||||
},
|
||||
{
|
||||
"id": 2023,
|
||||
"text": "2023",
|
||||
"schoolYear": 2023,
|
||||
"dateEndYearSchool": "2024-07-07T00:00:00+02:00",
|
||||
"dateStartYearSchool": "2023-09-04T00:00:00+02:00",
|
||||
"dateEndSubscribeSchool": "2023-09-01T00:00:00+02:00",
|
||||
"dateStartSubscribeSchool": "2022-09-01T00:00:00+02:00"
|
||||
}
|
||||
]
|
File diff suppressed because it is too large
Load Diff
|
@ -1,9 +1,33 @@
|
|||
[
|
||||
{
|
||||
"id": "105",
|
||||
"code": "105",
|
||||
"text": "AUTRE",
|
||||
"libelle": "AUTRE"
|
||||
},
|
||||
{
|
||||
"id": "30",
|
||||
"code": "30",
|
||||
"text": "BCG",
|
||||
"libelle": "BCG"
|
||||
"text": "B.C.G.",
|
||||
"libelle": "B.C.G."
|
||||
},
|
||||
{
|
||||
"id": "56",
|
||||
"code": "56",
|
||||
"text": "BOOSTRIX",
|
||||
"libelle": "BOOSTRIX"
|
||||
},
|
||||
{
|
||||
"id": "27",
|
||||
"code": "27",
|
||||
"text": "CHOLERA",
|
||||
"libelle": "CHOLERA"
|
||||
},
|
||||
{
|
||||
"id": "48",
|
||||
"code": "48",
|
||||
"text": "Contr\u00f4le B.C.G.",
|
||||
"libelle": "Contr\u00f4le B.C.G."
|
||||
},
|
||||
{
|
||||
"id": "3",
|
||||
|
@ -17,17 +41,107 @@
|
|||
"text": "DIPHTERIE",
|
||||
"libelle": "DIPHTERIE"
|
||||
},
|
||||
{
|
||||
"id": "6",
|
||||
"code": "6",
|
||||
"text": "DIPHTERIE TETANOS",
|
||||
"libelle": "DIPHTERIE TETANOS"
|
||||
},
|
||||
{
|
||||
"id": "9",
|
||||
"code": "9",
|
||||
"text": "DIPHT TETANOS COQ",
|
||||
"libelle": "DIPHT TETANOS COQ"
|
||||
},
|
||||
{
|
||||
"id": "19",
|
||||
"code": "19",
|
||||
"text": "DT BISRUDIVAX",
|
||||
"libelle": "DT BISRUDIVAX"
|
||||
},
|
||||
{
|
||||
"id": "10",
|
||||
"code": "10",
|
||||
"text": "DT COQ POLIO",
|
||||
"libelle": "DT COQ POLIO"
|
||||
},
|
||||
{
|
||||
"id": "13",
|
||||
"code": "13",
|
||||
"text": "DT COQ POLIO IPAD",
|
||||
"libelle": "DT COQ POLIO IPAD"
|
||||
},
|
||||
{
|
||||
"id": "8",
|
||||
"code": "8",
|
||||
"text": "DTPOLIO",
|
||||
"libelle": "DTPOLIO"
|
||||
"text": "DT POLIO",
|
||||
"libelle": "DT POLIO"
|
||||
},
|
||||
{
|
||||
"id": "45",
|
||||
"code": "45",
|
||||
"text": "DT TETANOS COQ",
|
||||
"libelle": "DT TETANOS COQ"
|
||||
},
|
||||
{
|
||||
"id": "11",
|
||||
"code": "11",
|
||||
"text": "DT TYPHOIDE",
|
||||
"libelle": "DT TYPHOIDE"
|
||||
},
|
||||
{
|
||||
"id": "129",
|
||||
"code": "129",
|
||||
"text": "ENGERIX",
|
||||
"libelle": "ENGERIX"
|
||||
},
|
||||
{
|
||||
"id": "26",
|
||||
"code": "26",
|
||||
"text": "FIEVRE JAUNE",
|
||||
"libelle": "FIEVRE JAUNE"
|
||||
},
|
||||
{
|
||||
"id": "4",
|
||||
"code": "4",
|
||||
"text": "F.TYPHOIDES",
|
||||
"libelle": "F.TYPHOIDES"
|
||||
},
|
||||
{
|
||||
"id": "144",
|
||||
"code": "144",
|
||||
"text": "GRIPPE",
|
||||
"libelle": "GRIPPE"
|
||||
},
|
||||
{
|
||||
"id": "143",
|
||||
"code": "143",
|
||||
"text": "HAEMOPHILUS HIB",
|
||||
"libelle": "HAEMOPHILUS HIB"
|
||||
},
|
||||
{
|
||||
"id": "17",
|
||||
"code": "17",
|
||||
"text": "HAVRIX",
|
||||
"libelle": "HAVRIX"
|
||||
},
|
||||
{
|
||||
"id": "29",
|
||||
"code": "29",
|
||||
"text": "HEPATITEB",
|
||||
"libelle": "HEPATITEB"
|
||||
"text": "HEPATITE B",
|
||||
"libelle": "HEPATITE B"
|
||||
},
|
||||
{
|
||||
"id": "146",
|
||||
"code": "146",
|
||||
"text": "HEXAXIM",
|
||||
"libelle": "HEXAXIM"
|
||||
},
|
||||
{
|
||||
"id": "59",
|
||||
"code": "59",
|
||||
"text": "HEXYON",
|
||||
"libelle": "HEXYON"
|
||||
},
|
||||
{
|
||||
"id": "16",
|
||||
|
@ -36,28 +150,226 @@
|
|||
"libelle": "HIB"
|
||||
},
|
||||
{
|
||||
"id": "152",
|
||||
"code": "152",
|
||||
"text": "IIP",
|
||||
"libelle": "IIP"
|
||||
"id": "24",
|
||||
"code": "24",
|
||||
"text": "IMOVAX OREILLONS",
|
||||
"libelle": "IMOVAX OREILLONS"
|
||||
},
|
||||
{
|
||||
"id": "151",
|
||||
"code": "151",
|
||||
"text": "MENINGOCOQUE",
|
||||
"libelle": "MENINGOCOQUE"
|
||||
"id": "121",
|
||||
"code": "121",
|
||||
"text": "INFANRIX",
|
||||
"libelle": "INFANRIX"
|
||||
},
|
||||
{
|
||||
"id": "150",
|
||||
"code": "150",
|
||||
"text": "POLIO",
|
||||
"libelle": "POLIO"
|
||||
"id": "52",
|
||||
"code": "52",
|
||||
"text": "INFANRIX HEXA",
|
||||
"libelle": "INFANRIX HEXA"
|
||||
},
|
||||
{
|
||||
"id": "32",
|
||||
"code": "32",
|
||||
"text": "INFANRIX POLIO",
|
||||
"libelle": "INFANRIX POLIO"
|
||||
},
|
||||
{
|
||||
"id": "33",
|
||||
"code": "33",
|
||||
"text": "INFANRIX POLIO HIB",
|
||||
"libelle": "INFANRIX POLIO HIB"
|
||||
},
|
||||
{
|
||||
"id": "51",
|
||||
"code": "51",
|
||||
"text": "INFANRIX QUINTA",
|
||||
"libelle": "INFANRIX QUINTA"
|
||||
},
|
||||
{
|
||||
"id": "55",
|
||||
"code": "55",
|
||||
"text": "INFANRIX TETRA",
|
||||
"libelle": "INFANRIX TETRA"
|
||||
},
|
||||
{
|
||||
"id": "147",
|
||||
"code": "147",
|
||||
"text": "INFLUVAC TETRA",
|
||||
"libelle": "INFLUVAC TETRA"
|
||||
},
|
||||
{
|
||||
"id": "137",
|
||||
"code": "137",
|
||||
"text": "INNUGRIP",
|
||||
"libelle": "INNUGRIP"
|
||||
},
|
||||
{
|
||||
"id": "18",
|
||||
"code": "18",
|
||||
"text": "LEPTOSPIROSE",
|
||||
"libelle": "LEPTOSPIROSE"
|
||||
},
|
||||
{
|
||||
"id": "22",
|
||||
"code": "22",
|
||||
"text": "MENINGITE",
|
||||
"libelle": "MENINGITE"
|
||||
},
|
||||
{
|
||||
"id": "130",
|
||||
"code": "130",
|
||||
"text": "MENINGITEC",
|
||||
"libelle": "MENINGITEC"
|
||||
},
|
||||
{
|
||||
"id": "123",
|
||||
"code": "123",
|
||||
"text": "MENINVAC",
|
||||
"libelle": "MENINVAC"
|
||||
},
|
||||
{
|
||||
"id": "120",
|
||||
"code": "120",
|
||||
"text": "MENINVACT",
|
||||
"libelle": "MENINVACT"
|
||||
},
|
||||
{
|
||||
"id": "139",
|
||||
"code": "139",
|
||||
"text": "MENJUGATE",
|
||||
"libelle": "MENJUGATE"
|
||||
},
|
||||
{
|
||||
"id": "149",
|
||||
"code": "149",
|
||||
"text": "M-M RVAX PRO",
|
||||
"libelle": "M-M RVAX PRO"
|
||||
},
|
||||
{
|
||||
"id": "133",
|
||||
"code": "133",
|
||||
"text": "MONOTEST",
|
||||
"libelle": "MONOTEST"
|
||||
},
|
||||
{
|
||||
"id": "124",
|
||||
"code": "124",
|
||||
"text": "MONOVAX",
|
||||
"libelle": "MONOVAX"
|
||||
},
|
||||
{
|
||||
"id": "132",
|
||||
"code": "132",
|
||||
"text": "NEISVAC",
|
||||
"libelle": "NEISVAC"
|
||||
},
|
||||
{
|
||||
"id": "110",
|
||||
"code": "110",
|
||||
"text": "OTITE",
|
||||
"libelle": "OTITE"
|
||||
},
|
||||
{
|
||||
"id": "134",
|
||||
"code": "134",
|
||||
"text": "PANENZA",
|
||||
"libelle": "PANENZA"
|
||||
},
|
||||
{
|
||||
"id": "31",
|
||||
"code": "31",
|
||||
"text": "PENTACOQ",
|
||||
"libelle": "PENTACOQ"
|
||||
},
|
||||
{
|
||||
"id": "53",
|
||||
"code": "53",
|
||||
"text": "PENTAVAC",
|
||||
"libelle": "PENTAVAC"
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
"code": "2",
|
||||
"text": "POLIOMYELITE",
|
||||
"libelle": "POLIOMYELITE"
|
||||
},
|
||||
{
|
||||
"id": "128",
|
||||
"code": "128",
|
||||
"text": "PREVENAR",
|
||||
"libelle": "PREVENAR"
|
||||
},
|
||||
{
|
||||
"id": "125",
|
||||
"code": "125",
|
||||
"text": "PRIORIX",
|
||||
"libelle": "PRIORIX"
|
||||
},
|
||||
{
|
||||
"id": "54",
|
||||
"code": "54",
|
||||
"text": "REPEVAX",
|
||||
"libelle": "REPEVAX"
|
||||
},
|
||||
{
|
||||
"id": "47",
|
||||
"code": "47",
|
||||
"text": "REVAXIS",
|
||||
"libelle": "REVAXIS"
|
||||
},
|
||||
{
|
||||
"id": "28",
|
||||
"code": "28",
|
||||
"text": "ROR",
|
||||
"libelle": "ROR"
|
||||
"text": "R O R",
|
||||
"libelle": "R O R"
|
||||
},
|
||||
{
|
||||
"id": "127",
|
||||
"code": "127",
|
||||
"text": "ROR VAX",
|
||||
"libelle": "ROR VAX"
|
||||
},
|
||||
{
|
||||
"id": "135",
|
||||
"code": "135",
|
||||
"text": "ROTARIX",
|
||||
"libelle": "ROTARIX"
|
||||
},
|
||||
{
|
||||
"id": "20",
|
||||
"code": "20",
|
||||
"text": "ROUVAX",
|
||||
"libelle": "ROUVAX"
|
||||
},
|
||||
{
|
||||
"id": "23",
|
||||
"code": "23",
|
||||
"text": "RUDI ROUVAX",
|
||||
"libelle": "RUDI ROUVAX"
|
||||
},
|
||||
{
|
||||
"id": "21",
|
||||
"code": "21",
|
||||
"text": "RUDIVAX",
|
||||
"libelle": "RUDIVAX"
|
||||
},
|
||||
{
|
||||
"id": "113",
|
||||
"code": "113",
|
||||
"text": "SCARLATINE",
|
||||
"libelle": "SCARLATINE"
|
||||
},
|
||||
{
|
||||
"id": "14",
|
||||
"code": "14",
|
||||
"text": "SERUM ANTI-TETANIQUE",
|
||||
"libelle": "SERUM ANTI-TETANIQUE"
|
||||
},
|
||||
{
|
||||
"id": "141",
|
||||
"code": "141",
|
||||
"text": "SYNAGIS",
|
||||
"libelle": "SYNAGIS"
|
||||
},
|
||||
{
|
||||
"id": "1",
|
||||
|
@ -65,10 +377,46 @@
|
|||
"text": "TETANOS",
|
||||
"libelle": "TETANOS"
|
||||
},
|
||||
{
|
||||
"id": "7",
|
||||
"code": "7",
|
||||
"text": "TETANOS POLIO",
|
||||
"libelle": "TETANOS POLIO"
|
||||
},
|
||||
{
|
||||
"id": "12",
|
||||
"code": "12",
|
||||
"text": "TETRACOQ",
|
||||
"libelle": "TETRACOQ"
|
||||
"text": "TETRA COQ",
|
||||
"libelle": "TETRA COQ"
|
||||
},
|
||||
{
|
||||
"id": "46",
|
||||
"code": "46",
|
||||
"text": "TETRAVAC ACELLULAIRE",
|
||||
"libelle": "TETRAVAC ACELLULAIRE"
|
||||
},
|
||||
{
|
||||
"id": "107",
|
||||
"code": "107",
|
||||
"text": "VARICELLE",
|
||||
"libelle": "VARICELLE"
|
||||
},
|
||||
{
|
||||
"id": "15",
|
||||
"code": "15",
|
||||
"text": "VARIOLE",
|
||||
"libelle": "VARIOLE"
|
||||
},
|
||||
{
|
||||
"id": "34",
|
||||
"code": "34",
|
||||
"text": "VAXELIS",
|
||||
"libelle": "VAXELIS"
|
||||
},
|
||||
{
|
||||
"id": "148",
|
||||
"code": "148",
|
||||
"text": "VAXIGRIP",
|
||||
"libelle": "VAXIGRIP"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -7,9 +7,7 @@
|
|||
"dateBirth": "1970-01-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
|
@ -18,7 +16,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -40,7 +38,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -96,13 +94,13 @@
|
|||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -1,31 +0,0 @@
|
|||
{
|
||||
"familyDoctor": {
|
||||
"name": "HIBBERT",
|
||||
"phone": "0656785678",
|
||||
"address": {
|
||||
"street1": "General Hospital",
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
}
|
||||
},
|
||||
"allergy1": null,
|
||||
"allergy2": null,
|
||||
"comment1": null,
|
||||
"comment2": null,
|
||||
"observ1": null,
|
||||
"observ2": null,
|
||||
"isAuthHospital": true,
|
||||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -10,13 +10,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
"category": "BI",
|
||||
"situation": "MARI",
|
||||
"flagCom": true,
|
||||
"nbChild": 3,
|
||||
"nbTotalChild": 4,
|
||||
"nbAES": "1",
|
||||
"nbChild": null,
|
||||
"nbTotalChild": null,
|
||||
"nbAES": null,
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
|
@ -17,9 +17,7 @@
|
|||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
"countryCode": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
|
@ -27,8 +25,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
@ -56,13 +54,9 @@
|
|||
"civility": "MR",
|
||||
"birth": {
|
||||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
@ -131,13 +125,9 @@
|
|||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2014-04-01T00:00:00+02:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
|
@ -146,7 +136,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -168,7 +158,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"lastname": "SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -224,13 +214,13 @@
|
|||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
@ -260,8 +250,7 @@
|
|||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_SV",
|
||||
"bPhoto": false,
|
||||
|
@ -294,8 +283,7 @@
|
|||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
|
@ -328,8 +316,7 @@
|
|||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
|
|
|
@ -2,10 +2,10 @@
|
|||
"number": "N/A",
|
||||
"category": "AUTR",
|
||||
"situation": "AUTR",
|
||||
"flagCom": false,
|
||||
"nbChild": 0,
|
||||
"nbTotalChild": 0,
|
||||
"nbAES": "0",
|
||||
"flagCom": true,
|
||||
"nbChild": null,
|
||||
"nbTotalChild": null,
|
||||
"nbAES": null,
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
|
@ -17,8 +17,7 @@
|
|||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
@ -56,9 +55,7 @@
|
|||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
"countryCode": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
@ -114,9 +111,7 @@
|
|||
"dateBirth": "1970-01-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": null,
|
||||
"bPhoto": false,
|
||||
|
@ -153,13 +148,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
@ -188,8 +183,7 @@
|
|||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_SV",
|
||||
"bPhoto": false,
|
||||
|
@ -222,8 +216,7 @@
|
|||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
|
@ -256,8 +249,7 @@
|
|||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
|
|
|
@ -1,36 +1,27 @@
|
|||
[
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
},
|
||||
{
|
||||
"yearRev": 2020,
|
||||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
},
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-01T00:00:00+01:00",
|
||||
"dateEnd": "2022-01-01T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2022-09-01T00:00:00+02:00",
|
||||
"dateEnd": "3000-08-31T00:00:00+02:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
|
|
|
@ -9,8 +9,7 @@
|
|||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
"countryCode": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
|
@ -18,8 +17,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
|
|
@ -9,9 +9,7 @@
|
|||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
"countryCode": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import argparse
|
||||
|
||||
import requests
|
||||
import zeep
|
||||
from zeep.transports import Transport
|
||||
from zeep.wsse.username import UsernameToken
|
||||
|
||||
WSSE = UsernameToken('maelis-webservice', 'maelis-password')
|
||||
WSDL_URL = 'https://demo-toulouse.sigec.fr/maelisws-toulouse-recette/services/FamilyService?wsdl'
|
||||
|
||||
|
||||
def read_family(family_id, verbose):
|
||||
session = requests.Session()
|
||||
session.verify = False
|
||||
transport = Transport(session=session)
|
||||
settings = zeep.Settings(strict=False, xsd_ignore_sequence_order=True)
|
||||
client = zeep.Client(WSDL_URL, transport=transport, wsse=WSSE, settings=settings)
|
||||
|
||||
result = client.service.readFamily(
|
||||
dossierNumber=family_id,
|
||||
# schoolYear=
|
||||
# incomeYear=2022, # <-- pour filtrer les quotients sur cette année
|
||||
# referenceYear=2020,
|
||||
)
|
||||
print(result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors')
|
||||
parser.add_argument('family_id', help='196544', nargs='?', default='196544')
|
||||
args = parser.parse_args()
|
||||
read_family(args.family_id, verbose=args.verbose)
|
|
@ -1,192 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
|
||||
def test_create_nursery_demand_on_existing_child(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/get-nursery-geojson'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
nurseries = resp.json()['features']
|
||||
assert len(nurseries) >= 2
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
nb_childs = len(res['data']['childList'])
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == []
|
||||
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], create_data['maggie_num'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == []
|
||||
|
||||
url = conn + '/create-nursery-demand'
|
||||
payload = {
|
||||
'family_id': create_data['family_id'],
|
||||
'family_indicators/0/code': 'APE_FIRSTC',
|
||||
'family_indicators/0/isActive': True,
|
||||
'child_id': create_data['maggie_num'],
|
||||
'demand_indicators/0/code': 'APE_COMPO1',
|
||||
'demand_indicators/0/isActive': True,
|
||||
'start_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
'number_of_days': '2',
|
||||
'start_hour_Mon': '08:00',
|
||||
'end_hour_Mon': '',
|
||||
'comment': 'bla',
|
||||
'accept_other_nurseries': True,
|
||||
'nursery1/idActivity': nurseries[0]['properties']['activity_id'],
|
||||
'nursery1/idUnit': nurseries[0]['properties']['unit_id'],
|
||||
'nursery1/idPlace': nurseries[0]['properties']['place_id'],
|
||||
'nursery2/idActivity': nurseries[1]['properties']['activity_id'],
|
||||
'nursery2/idUnit': nurseries[1]['properties']['unit_id'],
|
||||
'nursery2/idPlace': nurseries[1]['properties']['place_id'],
|
||||
'nursery3/idActivity': '',
|
||||
'nursery3/idUnit': '',
|
||||
'nursery3/idPlace': '',
|
||||
# indicators
|
||||
'child_indicators/0/code': 'APE_HBOTH',
|
||||
'child_indicators/0/isActive': True,
|
||||
'child_indicators/1/code': 'APE_HPAR',
|
||||
'child_indicators/1/isActive': True,
|
||||
'child_indicators/2/code': 'APE_COMPO3',
|
||||
'child_indicators/2/isActive': True,
|
||||
'child_indicators/3/code': 'APE_MULTIACC',
|
||||
'child_indicators/3/isActive': True,
|
||||
'family_indicators/0/code': 'APE_COMPO4',
|
||||
'family_indicators/0/isActive': True,
|
||||
'family_indicators/1/code': 'APE_NAIM',
|
||||
'family_indicators/1/isActive': True,
|
||||
'family_indicators/2/code': 'APE_FIRSTC',
|
||||
'family_indicators/2/isActive': True,
|
||||
'family_indicators/3/code': 'APE_COMPO2',
|
||||
'family_indicators/3/isActive': True,
|
||||
'family_indicators/4/code': 'APE_HAND',
|
||||
'family_indicators/4/isActive': True,
|
||||
'demand_indicators/0/code': 'APE_FRAT',
|
||||
'demand_indicators/0/isActive': True,
|
||||
'demand_indicators/1/code': 'APE_COMPO1',
|
||||
'demand_indicators/1/isActive': True,
|
||||
'demand_indicators/2/code': 'APE_HFRAT',
|
||||
'demand_indicators/2/isActive': True,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'data': None, 'err': 0}
|
||||
|
||||
# no child added
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert len(res['data']['childList']) == nb_childs
|
||||
|
||||
# check indicators
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO2',
|
||||
'APE_COMPO4',
|
||||
'APE_FIRSTC',
|
||||
'APE_HAND',
|
||||
'APE_NAIM',
|
||||
]
|
||||
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], create_data['maggie_num'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO3',
|
||||
'APE_HBOTH',
|
||||
'APE_HPAR',
|
||||
'APE_MULTIACC',
|
||||
]
|
||||
|
||||
|
||||
def test_create_nursery_demand_adding_new_child(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/get-nursery-geojson'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
nurseries = resp.json()['features']
|
||||
assert len(nurseries) >= 2
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
nb_childs = len(res['data']['childList'])
|
||||
assert 'NELSON' not in [x['firstname'] for x in res['data']['childList']]
|
||||
|
||||
url = conn + '/create-nursery-demand'
|
||||
payload = {
|
||||
'family_id': create_data['family_id'],
|
||||
'child_first_name': 'Nelson',
|
||||
'child_last_name': 'Muntz',
|
||||
'child_birthdate': '2013-10-31',
|
||||
'child_gender': 'G',
|
||||
'start_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
'nursery1/idActivity': nurseries[0]['properties']['activity_id'],
|
||||
'nursery1/idUnit': nurseries[0]['properties']['unit_id'],
|
||||
'nursery1/idPlace': nurseries[0]['properties']['place_id'],
|
||||
'nursery2/idActivity': nurseries[1]['properties']['activity_id'],
|
||||
'nursery2/idUnit': nurseries[1]['properties']['unit_id'],
|
||||
'nursery2/idPlace': nurseries[1]['properties']['place_id'],
|
||||
'nursery3/idActivity': '',
|
||||
'nursery3/idUnit': '',
|
||||
'nursery3/idPlace': '',
|
||||
# indicators
|
||||
'child_indicators/0/code': 'APE_HBOTH',
|
||||
'child_indicators/0/isActive': True,
|
||||
'child_indicators/1/code': 'APE_HPAR',
|
||||
'child_indicators/1/isActive': True,
|
||||
'child_indicators/2/code': 'APE_COMPO3',
|
||||
'child_indicators/2/isActive': True,
|
||||
'child_indicators/3/code': 'APE_MULTIACC',
|
||||
'child_indicators/3/isActive': True,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert res['err'] == 0
|
||||
child_id = resp.json()['data']
|
||||
assert child_id is not None
|
||||
|
||||
# a new child is created on family
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert len(res['data']['childList']) == nb_childs + 1
|
||||
assert 'NELSON' in [x['firstname'] for x in res['data']['childList']]
|
||||
assert res['data']['childList'][nb_childs]['num'] == child_id
|
||||
|
||||
# check child indicators
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], child_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert res['data']['firstname'] == 'NELSON'
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO3',
|
||||
'APE_HBOTH',
|
||||
'APE_HPAR',
|
||||
'APE_MULTIACC',
|
||||
]
|
|
@ -1,308 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def school_year(conn):
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
year = res['data'][0]['text']
|
||||
return year
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def exemption(conn):
|
||||
# get an exemption code
|
||||
url = conn + '/read-exemption-reasons-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
return res['data'][0]['id']
|
||||
|
||||
|
||||
def test_displaying_school_subscribed(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Read-family ramène les inscriptions aux date de visualisation paramétrées
|
||||
sur le référential YearSchool
|
||||
"""
|
||||
school_year = str(int(school_year) + 1)
|
||||
|
||||
# create a 7 year-old child
|
||||
url = conn + '/create-child?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'sexe': 'F',
|
||||
'firstname': 'Claris',
|
||||
'lastname': create_data['lastname'],
|
||||
'birth': {'dateBirth': '2016-09-12'},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
claris_id = str(resp.json()['data']['child_id'])
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration'
|
||||
payload = {
|
||||
'numPerson': claris_id,
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CE1',
|
||||
'dateSubscribe': school_year + '-01-01',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data']['returnMessage'] is None
|
||||
assert resp.json()['data']['subscribeSchoolBean']['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
assert resp.json()['data']['subscribeSchoolBean']['adresse'] == '101 GRANDE-RUE SAINT MICHEL'
|
||||
|
||||
# get Claris school from read-family
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()['data']
|
||||
date_start = [x['dateStartYearSchool'] for x in res if x['text'] == school_year][0]
|
||||
assert date_start[10] > datetime.datetime.now().strftime('%Y-%m-%d')
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
schools = [x['subscribeSchoolList'] for x in res['data']['childList'] if x['num'] == claris_id][0]
|
||||
assert len(schools) == 0 # school is filtered, but it is related to an hidden school year
|
||||
# field, not dateStartYearSchool, checked before : #2425
|
||||
|
||||
|
||||
def test_school_pre_registration_by_sector(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Pré-inscription de l'enfant de 7 ans dans son secteur
|
||||
"""
|
||||
# create a 7 year-old child
|
||||
url = conn + '/create-child?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'sexe': 'F',
|
||||
'firstname': 'Sego',
|
||||
'lastname': create_data['lastname'],
|
||||
'birth': {'dateBirth': '2016-05-09'},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
sego_id = str(resp.json()['data']['child_id'])
|
||||
|
||||
# assert there is a school at this address
|
||||
url = conn + '/read-schools-for-address-and-level'
|
||||
params = {
|
||||
'id_street': '2317',
|
||||
'num': '4',
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['text'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
|
||||
# assert there is a school at child address
|
||||
url = conn + '/read-schools-for-child-and-level'
|
||||
params = {
|
||||
'child_id': sego_id,
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['text'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
school_id = resp.json()['data'][0]['idSchool']
|
||||
assert school_id == '2435'
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration'
|
||||
payload = {
|
||||
'numPerson': sego_id,
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CE1',
|
||||
'dateSubscribe': school_year + '-01-01',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data']['returnMessage'] is None
|
||||
assert resp.json()['data']['subscribeSchoolBean']['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
assert resp.json()['data']['subscribeSchoolBean']['adresse'] == '101 GRANDE-RUE SAINT MICHEL'
|
||||
|
||||
# get Sego school from read-family
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()['data']
|
||||
date_start = [x['dateStartYearSchool'] for x in res if x['text'] == school_year][0]
|
||||
assert date_start[10] > datetime.datetime.now().strftime('%Y-%m-%d')
|
||||
# school is filtered, but it is related to an hidden school year
|
||||
# field, not dateStartYearSchool, see #2425
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
schools = [x['subscribeSchoolList'] for x in res['data']['childList'] if x['num'] == sego_id][0]
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
|
||||
"""
|
||||
Pré-inscription d'un enfant de 5 ans en CP avec rappprochement de fratrie pour celui de 7 ans :
|
||||
rapprochement dans le secteur de l'enfant.
|
||||
"""
|
||||
# get Sego school
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': sego_id,
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolYearList']
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['subscribeSchool']['school']['idSchool'] == school_id
|
||||
assert schools[0]['subscribeSchool']['perim']['idPerim'] == '2707'
|
||||
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['maggie_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'GS',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': school_id,
|
||||
'numPersonSibling': sego_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'CALAS MATERNELLE'
|
||||
assert resp.json()['data']['adresse'] == '47 RUE ACHILLE VIADIEU' # same sector
|
||||
|
||||
|
||||
def test_school_pre_registration_by_exemption(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Pré-inscription de l'enfant de 9 ans en dérogation :
|
||||
c'est une dérogation avec sélection du motif sur un établissement hors secteur
|
||||
"""
|
||||
# school list
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'year': school_year,
|
||||
'level': 'CM1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolInformation']['derogSchoolList']
|
||||
assert len(schools) > 1
|
||||
school_id = schools[0]['id']
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration-with-exemption'
|
||||
payload = {
|
||||
'numPerson': create_data['bart_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CM1',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idRequestSchool1': school_id,
|
||||
'derogReasonCode': exemption,
|
||||
'derogComment': 'bla',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'AMIDONNIERS ELEMENTAIRE'
|
||||
assert resp.json()['data']['adresse'] == '123 ALL DE BRIENNE'
|
||||
|
||||
"""
|
||||
Pré-inscription de l'autre enfant de 5 ans en CP
|
||||
avec rapprochement de fratrie pour celui de 9 ans :
|
||||
rapprochement hors du secteur de l'enfant.
|
||||
"""
|
||||
|
||||
# check E124 error
|
||||
# get a school that do not provide a level in its sector
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['hugo_num'],
|
||||
'year': school_year,
|
||||
'level': 'GS',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert [
|
||||
x['idSchool']
|
||||
for x in data['childSubscribeSchoolInformation']['subscribeSchoolInformation']['derogSchoolList']
|
||||
if x['text'] == 'DIEUZAIDE JEAN MATERNELLE'
|
||||
] == ['2437']
|
||||
|
||||
# try to book on a sector that do not provide the requested level
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['hugo_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CP',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': '2437',
|
||||
'numPersonSibling': create_data['bart_num'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 1
|
||||
assert resp.json()['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
assert 'E124' in resp.json()['err_desc']
|
||||
|
||||
# get Bart school
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'year': school_year,
|
||||
'level': 'CM1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolYearList']
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['subscribeSchool']['school']['idSchool'] == school_id
|
||||
assert schools[0]['subscribeSchool']['perim']['idPerim'] == '2663'
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['hugo_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'GS',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': school_id,
|
||||
'numPersonSibling': create_data['bart_num'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'AMIDONNIERS MATERNELLE'
|
||||
assert resp.json()['data']['adresse'] == '125 ALL DE BRIENNE'
|
|
@ -1,369 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
|
||||
def test_perisco(perisco_subscribe_info):
|
||||
assert perisco_subscribe_info['info']['activity']['libelle1'] == 'TEST TEMPS DU MIDI 22/23'
|
||||
|
||||
|
||||
def test_perisco_adulte(perisco_subscribe_adulte_info):
|
||||
assert perisco_subscribe_adulte_info['info']['activity']['libelle1'] == 'TEST RESTAURATION ADULTE 22/23'
|
||||
|
||||
|
||||
def test_perisco_agenda(conn, create_data, perisco_subscribe_info):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# find first available booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
booking = None
|
||||
for booking in resp.json()['data']:
|
||||
if booking['disabled'] is False:
|
||||
break
|
||||
else:
|
||||
raise Exception('no booking available')
|
||||
assert booking['details']['activity_id'] == perisco_subscribe_info['activity']['id']
|
||||
assert booking['details']['activity_label'] == 'Temps du midi'
|
||||
assert booking['prefill'] is False
|
||||
|
||||
# book activity
|
||||
url = conn + '/update-child-agenda?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [booking['id']],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {
|
||||
'updated': True,
|
||||
'count': 1,
|
||||
'changes': [
|
||||
{
|
||||
'booked': True,
|
||||
'activity_id': booking['details']['activity_id'],
|
||||
'activity_label': 'Temps du midi',
|
||||
'day': booking['details']['day_str'],
|
||||
}
|
||||
],
|
||||
'err': 0,
|
||||
}
|
||||
|
||||
# check booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_agenda_adulte(conn, create_data2, perisco_subscribe_adulte_info):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['rl1_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_adulte_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_adulte_info['place']['id'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# find first available booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
booking = None
|
||||
for booking in resp.json()['data']:
|
||||
if booking['disabled'] is False:
|
||||
break
|
||||
else:
|
||||
raise Exception('no booking available')
|
||||
assert booking['details']['activity_id'] == perisco_subscribe_adulte_info['activity']['id']
|
||||
assert booking['details']['activity_label'] == 'RESTAURATION ADULTE'
|
||||
assert booking['prefill'] is False
|
||||
|
||||
# book activity
|
||||
url = conn + '/update-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [booking['id']],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {
|
||||
'updated': True,
|
||||
'count': 1,
|
||||
'changes': [
|
||||
{
|
||||
'booked': True,
|
||||
'activity_id': booking['details']['activity_id'],
|
||||
'activity_label': 'RESTAURATION ADULTE',
|
||||
'day': booking['details']['day_str'],
|
||||
}
|
||||
],
|
||||
'err': 0,
|
||||
}
|
||||
|
||||
# check booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_recurrent_week(conn, create_data, perisco_subscribe_info, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
# no subscribed activity
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 0
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['id'] == perisco_subscribe_info['activity']['id']
|
||||
assert [(x['text'], x['libelle'], x['libelle2']) for x in resp.json()['data']] == [
|
||||
('Temps du midi', 'TEST TEMPS DU MIDI 22/23', 'Temps du midi'),
|
||||
]
|
||||
|
||||
# get recurent-week gabarit
|
||||
url = conn + '/get-recurrent-week?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [(x['id'], x['day']) for x in resp.json()['data']] == [
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
]
|
||||
|
||||
# no booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['maggie_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert not any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# set recurent-week gabarit
|
||||
url = conn + '/update-recurrent-week?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
|
||||
# there is now some bookings
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['maggie_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
|
||||
def test_perisco_recurrent_week_adulte(conn, create_data2, perisco_subscribe_adulte_info, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
# no subscribed activity
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 0
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_adulte_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_adulte_info['place']['id'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['id'] == perisco_subscribe_adulte_info['activity']['id']
|
||||
assert [(x['text'], x['libelle'], x['libelle2']) for x in resp.json()['data']] == [
|
||||
('RESTAURATION ADULTE', 'TEST RESTAURATION ADULTE 22/23', 'RESTAURATION ADULTE')
|
||||
]
|
||||
|
||||
# get recurent-week gabarit
|
||||
url = conn + '/get-recurrent-week?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [(x['id'], x['day']) for x in resp.json()['data']] == [
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('3-X', 'Mercredi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
]
|
||||
|
||||
# no booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl2_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert not any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# set recurent-week gabarit
|
||||
url = conn + '/update-recurrent-week?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
|
||||
# there is now some bookings
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl2_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
|
@ -1,261 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import get_subscription_info, link, unlink
|
||||
|
||||
# LOISIR is like EXTRACO (most tests are redondants) but :
|
||||
# * there is no calendar (days) to provide.
|
||||
# * there is a general catalog to display
|
||||
|
||||
|
||||
def test_catalog_general_loisirs(conn, update_data):
|
||||
unlink(conn, update_data['name_id'])
|
||||
link(conn, update_data)
|
||||
url = conn + '/read-activity-list'
|
||||
params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}
|
||||
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
labels = [x['text'] for x in resp.json()['data']]
|
||||
assert (
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS'
|
||||
in labels
|
||||
)
|
||||
assert (
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 14h/16h30 - 10/15Ans, LA RAMEE'
|
||||
in labels
|
||||
)
|
||||
assert (
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 15h30/17h - 8/15Ans, ARGOULETS'
|
||||
in labels
|
||||
)
|
||||
assert 'Promenade forêt enchantée, TEST promenade forêt enchantée, TERRITOIRE OUEST' in labels
|
||||
assert 'Vitrail Fusing 1/2 Je Adultes, Inscription annuelle, Centre Culturel ALBAN MINVILLE' in labels
|
||||
|
||||
for item in resp.json()['data']:
|
||||
if (
|
||||
item['text']
|
||||
== 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS'
|
||||
):
|
||||
assert item['criterias'] == {
|
||||
'service': {'text': 'Service', 'data': {'sports': 'Sports'}, 'order': ['sports']},
|
||||
'nature': {
|
||||
'text': "Nature de l'activité",
|
||||
'data': {'1': 'Activités Régulières'},
|
||||
'order': ['1'],
|
||||
},
|
||||
'type': {
|
||||
'text': "Type de l'activité",
|
||||
'data': {'activites-aquatiques': 'Activités Aquatiques'},
|
||||
'order': ['activites-aquatiques'],
|
||||
},
|
||||
'public': {
|
||||
'text': 'Public',
|
||||
'data': {'1': 'Enfant (3-11 ans)', '2': 'Ado (12-17 ans)'},
|
||||
'order': ['1', '2'],
|
||||
},
|
||||
'day': {'text': 'Jours', 'data': {'3': 'Mercredi'}, 'order': ['3']},
|
||||
'place': {'text': 'Lieu', 'data': {'A10053179757': 'ARGOULETS'}, 'order': ['A10053179757']},
|
||||
}
|
||||
assert item['activity']['activityPortail']['blocNoteList'] == [
|
||||
{
|
||||
'note': "Activité ayant lieu le Mercredi, merci de choisir votre tranche horraire en fonction de l'âge de votre enfant.",
|
||||
'numIndex': 1,
|
||||
}
|
||||
]
|
||||
if item['text'] == 'Promenade forêt enchantée, TEST promenade forêt enchantée, TERRITOIRE OUEST':
|
||||
assert item['criterias'] == {
|
||||
'service': {'text': 'Service', 'data': {'sports': 'Sports'}, 'order': ['sports']},
|
||||
'nature': {
|
||||
'text': "Nature de l'activité",
|
||||
'data': {'1': 'Activités Régulières'},
|
||||
'order': ['1'],
|
||||
},
|
||||
'type': {
|
||||
'text': "Type de l'activité",
|
||||
'data': {'activite-pedestre': 'Activité Pédestre'},
|
||||
'order': ['activite-pedestre'],
|
||||
},
|
||||
'public': {'text': 'Public', 'data': {'5': 'Sénior (60 ans et plus)'}, 'order': ['5']},
|
||||
'day': {
|
||||
'text': 'Jours',
|
||||
'data': {'1': 'Lundi', '2': 'Mardi', '3': 'Mercredi', '4': 'Jeudi', '5': 'Vendredi'},
|
||||
'order': ['1', '2', '3', '4', '5'],
|
||||
},
|
||||
'place': {
|
||||
'text': 'Lieu',
|
||||
'data': {'A10056517597': 'TERRITOIRE OUEST'},
|
||||
'order': ['A10056517597'],
|
||||
},
|
||||
}
|
||||
assert item['activity']['activityPortail']['blocNoteList'] == [
|
||||
{'note': 'Activité de promenade en forêt.', 'numIndex': 1}
|
||||
]
|
||||
|
||||
|
||||
def test_catalog_personnalise_loisirs(loisirs_subscribe_info):
|
||||
assert (
|
||||
loisirs_subscribe_info['info']['activity']['libelle1']
|
||||
== 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES'
|
||||
)
|
||||
assert loisirs_subscribe_info['info']['calendarGeneration']['code'] == 'REQUIRED'
|
||||
assert [(x['id'], x['day']) for x in loisirs_subscribe_info['info']['recurrent_week']] == []
|
||||
assert loisirs_subscribe_info['info']['billingInformation'] == {
|
||||
'modeFact': 'FORFAIT',
|
||||
'quantity': 1.0,
|
||||
'unitPrice': 88.5,
|
||||
}
|
||||
|
||||
|
||||
def test_catalog_personnalise_loisirs_not_allowed(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
try:
|
||||
get_subscription_info(
|
||||
'LOISIRS',
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
|
||||
'MERCREDI - 15h30/17h - 8/15Ans',
|
||||
'ARGOULETS',
|
||||
conn,
|
||||
create_data['name_id'],
|
||||
create_data['rl1_num'],
|
||||
reference_year,
|
||||
)
|
||||
except Exception:
|
||||
return
|
||||
assert False, 'Adult can subscribe to child activity'
|
||||
|
||||
|
||||
def test_direct_subscribe(conn, create_data, loisirs_subscribe_info, reference_year):
|
||||
assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['hugo_num'],
|
||||
'activity_id': loisirs_subscribe_info['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info['place']['id'],
|
||||
'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# no idIns provided to remove subscription later
|
||||
assert resp.json()['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_direct_subscribe_out_town(conn, create_data2, loisirs_subscribe_info2, reference_year):
|
||||
assert loisirs_subscribe_info2['info']['controlResult']['controlOK'] is True
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['hugo_num'],
|
||||
'activity_id': loisirs_subscribe_info2['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info2['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info2['place']['id'],
|
||||
'start_date': loisirs_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info2['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# no idIns provided to remove subscription later
|
||||
assert resp.json()['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_subscribe_to_basket(conn, create_data, loisirs_subscribe_info, reference_year):
|
||||
assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': loisirs_subscribe_info['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info['place']['id'],
|
||||
'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
basket_id = resp.json()['data']['basket']['id']
|
||||
|
||||
# remove subscription
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_global_capacity(conn, create_data2, loisirs_subscribe_info3, reference_year):
|
||||
assert loisirs_subscribe_info3['info']['controlResult']['controlOK'] is True
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
# subscribe Bart
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
# url = conn + '/add-person-basket-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['bart_num'],
|
||||
'activity_id': loisirs_subscribe_info3['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info3['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info3['place']['id'],
|
||||
'start_date': loisirs_subscribe_info3['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info3['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
# basket_id = resp.json()['data']['basket']['id']
|
||||
|
||||
# subscribe Lisa
|
||||
payload['person_id'] = create_data2['lisa_num']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# subscribe Maggie
|
||||
payload['person_id'] = create_data2['maggie_num']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# can't subscribe Huggo
|
||||
payload['person_id'] = create_data2['hugo_num']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 1
|
||||
assert resp.json()['err_desc'] == ''
|
||||
|
||||
# check capacity on main catalog
|
||||
url = conn + '/read-activity-list'
|
||||
params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
for item in resp.json()['data']:
|
||||
if item['activity']['libelle'] == 'PUBLIK Vitrail Fusing 1/2 Je Adultes 2022/2023 - Mardi 14h-1':
|
||||
import pdb
|
||||
|
||||
pdb.set_trace()
|
||||
|
||||
# # remove subscriptions
|
||||
# url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
# payload = {'basket_id': basket_id}
|
||||
# resp = requests.post(url, json=payload)
|
||||
# resp.raise_for_status()
|
||||
# assert resp.json()['err'] == 0
|
|
@ -1,322 +0,0 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
def test_catalog_personnalise_extrasco(extrasco_subscribe_info):
|
||||
assert (
|
||||
extrasco_subscribe_info['info']['activity']['libelle1']
|
||||
== 'PUBLIK ADL ELEMENTAIRE Maourine JUIN 22/23(NE PAS UTILISER)'
|
||||
)
|
||||
assert extrasco_subscribe_info['info']['calendarGeneration']['code'] == 'NOT_REQUIRED'
|
||||
assert extrasco_subscribe_info['info']['billingInformation'] == {
|
||||
'modeFact': 'PRESENCE',
|
||||
'quantity': None,
|
||||
'unitPrice': 11.5,
|
||||
}
|
||||
assert extrasco_subscribe_info['info']['activity']['blocNoteList'] == [
|
||||
{
|
||||
'note': 'Lien vers le réglement intérieur :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/03/23/flyer-sejour.pdf\r\nLien vers arrêté municipal :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/04/05/arrete-municipal.pdf',
|
||||
'numIndex': 1,
|
||||
}
|
||||
]
|
||||
assert (
|
||||
extrasco_subscribe_info['info']['agenda'][0]['details']['activity_label']
|
||||
== 'ADL ELEMENTAIRE Maourine Juin'
|
||||
)
|
||||
|
||||
|
||||
def test_catalog_personnalise_extrasco2(extrasco_subscribe_info2):
|
||||
assert (
|
||||
extrasco_subscribe_info2['info']['activity']['libelle1']
|
||||
== 'PUBLIK ADL MATERNELLE Lardenne JUIN 22/23 (NEPAS UTILISER)'
|
||||
)
|
||||
assert extrasco_subscribe_info2['info']['calendarGeneration']['code'] == 'FORBIDDEN'
|
||||
assert extrasco_subscribe_info2['info']['billingInformation'] == {
|
||||
'modeFact': 'PRESENCE',
|
||||
'quantity': None,
|
||||
'unitPrice': 11.5,
|
||||
}
|
||||
assert extrasco_subscribe_info2['info']['activity']['blocNoteList'] == [
|
||||
{
|
||||
'note': 'Lien vers le réglement intérieur :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/03/23/flyer-sejour.pdf\r\nLien vers arrêté municipal :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/04/05/arrete-municipal.pdf',
|
||||
'numIndex': 1,
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def test_direct_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['hugo_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# no idIns provided to remove subscription later
|
||||
assert resp.json()['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_subscribe_with_conveyance(conn, create_data, extrasco_subscribe_info):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
assert extrasco_subscribe_info['info']['conveyance'] is not None
|
||||
morning = [
|
||||
x['id'] for x in extrasco_subscribe_info['info']['conveyance']['morningJourney']['depositPlaceList']
|
||||
]
|
||||
afternoon = [
|
||||
x['id'] for x in extrasco_subscribe_info['info']['conveyance']['afternoonJourney']['depositPlaceList']
|
||||
]
|
||||
assert len(morning) > 0
|
||||
assert len(afternoon) > 0
|
||||
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'conveyanceSubscribe/idPlaceMorning': morning[0],
|
||||
'conveyanceSubscribe/idPlaceAfternoon': afternoon[0],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
basket_id = resp.json()['data']['basket']['id']
|
||||
|
||||
# remove subscription
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
def test_subscribe_with_recurrent_week(conn, create_data, extrasco_subscribe_info):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
assert [(x['id'], x['day']) for x in extrasco_subscribe_info['info']['recurrent_week']] == [
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('3-X', 'Mercredi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
]
|
||||
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
basket_id = resp.json()['data']['basket']['id']
|
||||
|
||||
# there is now some bookings
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# check quantity into basket
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
line = resp.json()['data'][0]['lignes'][0]
|
||||
assert line['prixUnit'] == 11.5
|
||||
assert line['qte'] > 0
|
||||
assert line['montant'] == line['prixUnit'] * line['qte']
|
||||
|
||||
# remove subscription
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_bookings():
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_perisco_bookings():
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return [
|
||||
item
|
||||
for item in resp.json()['data']
|
||||
if item['details']['activity_id'] == extrasco_subscribe_info['activity']['id']
|
||||
]
|
||||
|
||||
# subscribe without providing calendar
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
basket_id = resp.json()['data']['basket']['id']
|
||||
|
||||
# no booking
|
||||
assert not any(x['prefill'] for x in get_bookings())
|
||||
assert not any(x['prefill'] for x in get_perisco_bookings())
|
||||
|
||||
# book using info calendar gabarit (booking registered from w.c.s. form)
|
||||
assert len(extrasco_subscribe_info['info']['agenda']) > 0
|
||||
assert not any(x['prefill'] for x in extrasco_subscribe_info['info']['agenda'])
|
||||
slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
|
||||
# there is now 2 bookings
|
||||
assert len([x['prefill'] for x in get_bookings() if x['prefill'] is True]) == 2
|
||||
perisco_bookings = get_perisco_bookings()
|
||||
assert len([x['prefill'] for x in perisco_bookings if x['prefill'] is True]) == 2
|
||||
assert perisco_bookings[0]['details']['activity_label'] == 'ADL ELEMENTAIRE Maourine Juin'
|
||||
|
||||
# check quantity into basket
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
line = resp.json()['data'][0]['lignes'][0]
|
||||
assert (line['prixUnit'], line['qte'], line['montant']) == (11.5, 0.0, 0.0)
|
||||
|
||||
# unbook slots
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [False, False]
|
||||
assert not any(x['prefill'] for x in get_bookings())
|
||||
|
||||
# remove subscription
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_daily_capacity(conn, create_data2, extrasco_subscribe_info3):
|
||||
assert extrasco_subscribe_info3['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def subscribe(child):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['%s_num' % child],
|
||||
'activity_id': extrasco_subscribe_info3['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info3['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info3['place']['id'],
|
||||
'start_date': extrasco_subscribe_info3['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info3['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']['basket']['id']
|
||||
|
||||
def book(child, slot):
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['%s_num' % child],
|
||||
'activity_id': extrasco_subscribe_info3['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info3['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info3['unit']['dateEnd'][:10],
|
||||
'booking_list': [slot],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
# subscribe all family childs
|
||||
basket_id = subscribe('bart')
|
||||
for child in 'lisa', 'maggie', 'hugo':
|
||||
assert subscribe(child) == basket_id
|
||||
|
||||
# book all childs on the same day
|
||||
assert len(extrasco_subscribe_info3['info']['agenda']) > 0
|
||||
assert not any(x['prefill'] for x in extrasco_subscribe_info3['info']['agenda'])
|
||||
slots = [x['id'] for x in extrasco_subscribe_info3['info']['agenda'] if x['disabled'] is False]
|
||||
for child in 'bart', 'lisa', 'maggie':
|
||||
resp = book(child, slots[-1])
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True]
|
||||
resp = book('hugo', slots[-1])
|
||||
assert resp.json()['err'] == 1
|
||||
assert resp.json()['err_desc'] == 0
|
||||
|
||||
# # remove subscriptions
|
||||
# url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
# payload = {'basket_id': basket_id}
|
||||
# resp = requests.post(url, json=payload)
|
||||
# resp.raise_for_status()
|
||||
# assert resp.json()['err'] == 0
|
|
@ -1,557 +0,0 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
def test_basket_subscribe_extrasco(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1 # 3 sur Larden
|
||||
|
||||
subs = subscriptions(create_data['bart_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 1
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS'
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1 # 3 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
subs = subscriptions(create_data['maggie_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 1
|
||||
|
||||
# delete (generic) basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 2 # 6 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
# line for the generic unit for Bart
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
if y['inscription']['idUnit'] == extrasco_subscribe_info['unit']['id']
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 1 # 3 sur Larden
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# re-subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
def test_basket_subscribe_extrasco2(conn, create_data, extrasco_subscribe_info2, reference_year):
    """Subscribing to a generic unit"""
    # End-to-end basket scenario on a *generic* EXTRASCO unit:
    # subscribe / re-subscribe / delete line / delete basket, checking the
    # basket and subscription state after each step.
    # Marked xfail(run=False): several "3 expected" assertions below currently
    # hold with 1 because the backend does not yet expand the generic unit.
    assert extrasco_subscribe_info2['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # Fetch the current family baskets; asserts the API-level err code.
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # Add a basket subscription on the generic unit for the given person.
        # Returns the raw response so callers can assert err == 0 or err == 1.
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info2['activity']['id'],
            'unit_id': extrasco_subscribe_info2['unit']['id'],
            'place_id': extrasco_subscribe_info2['place']['id'],
            'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    def subscriptions(person_id):
        # List the person's EXTRASCO subscriptions for the reference school year.
        url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'nature': 'EXTRASCO',
            'school_year': '%s-%s' % (reference_year, reference_year + 1),
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def get_bookings(person_id):
        # Read the person's activity agenda over the unit's date range.
        url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info2['activity']['id'],
            'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    # no subscription
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 105
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 1  # 3 expected

    subs = subscriptions(create_data['bart_num'])
    assert len(subs) == 1
    assert len(subs[0]['subscribesUnit']) == 2
    assert [x['libelle'] for x in subs[0]['subscribesUnit']] == [
        'PUBLIK ADL MATERNELLE Lardenne JUIN 22/23 (NEPAS UTILISER)',
        'PUBLIK ADL MATER JOURNEE AVEC REPAS',
    ]
    assert subscriptions(create_data['maggie_num']) == []

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 105
    assert data[0]['text'] == 'ENFANCE LOISIRS'
    assert len(data[0]['lignes']) == 1  # 3 expected
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1

    # we should get 3 idIns because we subscribe a generic unit
    assert len({x['idIns'] for x in data[0]['lignes']}) == 1  # 3 expected
    basket_id = data[0]['id']

    # cannot subscribe Bart twice
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 1
    assert 'E1019' in resp.json()['err_desc']
    assert len(get_baskets()) == 1

    # delete basket
    # should be called by the user or by a cron job
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'
    assert get_baskets() == []
    # deleting the basket also cancels the pending subscription
    assert subscriptions(create_data['bart_num']) == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len(subscriptions(create_data['bart_num'])) == 1

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # delete (generic) basket line for Bart
    data = get_baskets()
    assert len(data) == 1
    assert len(data[0]['lignes']) == 2  # 6 on Lardenne
    basket_id = data[0]['id']
    # line for the generic unit for Bart
    line_id = [
        y['id']
        for x in data
        for y in x['lignes']
        if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
        if y['inscription']['idUnit'] == extrasco_subscribe_info2['unit']['id']
    ][0]
    url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
    payload = {
        'basket_id': basket_id,
        'line_id': line_id,
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['codeRegie'] == 105
    assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
    assert len({x['idIns'] for x in data['lignes']}) == 1  # 3 on Lardenne
    data = get_baskets()
    assert len(data) == 1
    # NOTE(review): redundant second network call — `data` above already
    # establishes len(get_baskets()) == 1
    assert len(get_baskets()) == 1
    assert len(data[0]['lignes']) == 1  # 3 on Lardenne
    assert subscriptions(create_data['bart_num']) == []
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # re-subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    assert len(subscriptions(create_data['bart_num'])) == 1

    # add bookings to Bart
    slots = [x['id'] for x in extrasco_subscribe_info2['info']['agenda'] if x['disabled'] is False]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info2['activity']['id'],
        'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert [x['booked'] for x in resp.json()['changes']] == [True, True]
    assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2

    # add bookings to Maggie
    # slot ids embed the person number as first ':'-separated field; rewrite
    # Bart's slot ids into Maggie's
    slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['maggie_num'],
        'activity_id': extrasco_subscribe_info2['activity']['id'],
        'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert [x['booked'] for x in resp.json()['changes']] == [True, True]
    assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2

    # delete basket
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'
    assert get_baskets() == []
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []
||||
|
||||
|
||||
def test_basket_subscribe_loisirs(conn, create_data, loisirs_subscribe_info, reference_year):
    """Basket scenario on a LOISIRS unit.

    Subscribe Bart and Maggie, check basket contents and amounts, delete
    Bart's basket line, then delete the whole basket and verify that the
    remaining subscription is cancelled too.
    """
    assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # Fetch the current family baskets; asserts the API-level err code.
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # Add a basket subscription on the LOISIRS unit for the given person.
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': loisirs_subscribe_info['activity']['id'],
            'unit_id': loisirs_subscribe_info['unit']['id'],
            'place_id': loisirs_subscribe_info['place']['id'],
            'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
            'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    def subscriptions(person_id):
        # List the person's LOISIRS subscriptions, restricted to the activity
        # under test (other LOISIRS subscriptions may pre-exist on the family).
        url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'nature': 'LOISIRS',
            'school_year': '%s-%s' % (reference_year, reference_year + 1),
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return [
            x
            for x in resp.json()['data']
            if x['libelle'] == 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES'
        ]

    # no subscription
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 109
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 1
    subs = subscriptions(create_data['bart_num'])
    assert len(subs) == 1
    assert len(subs[0]['subscribesUnit']) == 2
    assert [x['libelle'] for x in subs[0]['subscribesUnit']] == [
        'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
        'MERCREDI - 15h30/17h - 8/15Ans',
    ]
    assert subscriptions(create_data['maggie_num']) == []

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 109
    assert data[0]['text'] == 'SPORT'
    assert len(data[0]['lignes']) == 1
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
    assert len({x['idIns'] for x in data[0]['lignes']}) == 1
    assert data[0]['lignes'][0]['montant'] == 88.5
    basket_id = data[0]['id']

    # cannot subscribe Bart twice
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 1
    assert 'E1019' in resp.json()['err_desc']
    assert len(get_baskets()) == 1

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    subs = subscriptions(create_data['maggie_num'])
    assert len(subs) == 1
    assert len(subs[0]['subscribesUnit']) == 2

    # basket
    # both children end up as two lines in the same (single) family basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['id'] == basket_id
    assert data[0]['codeRegie'] == 109
    assert data[0]['text'] == 'SPORT'
    assert len(data[0]['lignes']) == 2
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 2
    assert len({x['idIns'] for x in data[0]['lignes']}) == 2
    assert all(x['montant'] == 88.5 for x in data[0]['lignes'])

    # delete basket line for Bart
    data = get_baskets()
    assert len(data) == 1
    assert len(data[0]['lignes']) == 2
    basket_id = data[0]['id']
    # line for Bart
    line_id = [
        y['id']
        for x in data
        for y in x['lignes']
        if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
    ][0]
    url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
    payload = {
        'basket_id': basket_id,
        'line_id': line_id,
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['codeRegie'] == 109
    assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
    assert len({x['idIns'] for x in data['lignes']}) == 1
    data = get_baskets()
    assert len(data) == 1
    # NOTE(review): redundant second network call — `data` above already
    # establishes len(get_baskets()) == 1
    assert len(get_baskets()) == 1
    assert len(data[0]['lignes']) == 1
    assert subscriptions(create_data['bart_num']) == []
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # delete basket
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'
    assert get_baskets() == []
    # Maggie's pending subscription is cancelled along with the basket
    assert subscriptions(create_data['maggie_num']) == []
|
|
@ -1,346 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import diff, link, unlink
|
||||
|
||||
|
||||
def test_direct_debit_order(conn, create_data):
    """Create a direct-debit order for RL1, then read it back and compare
    it (person number excluded) against the recorded JSON fixture."""
    # start from a clean, freshly linked family
    unlink(conn, create_data['name_id'])
    link(conn, create_data)

    # register a direct-debit order on regie 102 for RL1
    add_url = conn + '/add-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
    order_payload = {
        'codeRegie': '102',
        'bank/bankBIC': 'BDFEFR2T',
        'bank/bankIBAN': 'FR7630001007941234567890185',
        'bank/bankRUM': 'xxx',
        'bank/dateStart': '2023-01-01',
        'bank/bankAddress': '75049 PARIS cedex 01',
        'bank/civility': 'x',
        'bank/lastName': 'Ewing',
        'bank/firstName': 'John Ross',
    }
    response = requests.post(add_url, json=order_payload)
    response.raise_for_status()
    result = response.json()
    assert result['data'] == 'ok'

    # read the order back for the same regie at the start date
    get_url = conn + '/get-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
    query = {
        'codeRegie': '102',
        'dateRef': '2023-01-01',
    }
    response = requests.get(get_url, params=query)
    response.raise_for_status()
    result = response.json()
    # the person number varies between runs: mask it before fixture comparison
    result['data']['numPerson'] = 'N/A'
    assert diff(result['data'], 'test_get_rl1_direct_debit_order.json')
||||
|
||||
|
||||
def test_pay_invoice_loisirs(conn, create_data, loisirs_subscribe_info, reference_year):
    """Full payment flow on a LOISIRS basket.

    Subscribe both children, validate the basket to generate an invoice,
    then pay the invoice and check it moves from the "to be paid" list to
    the payment history.
    """
    assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # Fetch the current family baskets; asserts the API-level err code.
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # Add a basket subscription on the LOISIRS unit for the given person.
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': loisirs_subscribe_info['activity']['id'],
            'unit_id': loisirs_subscribe_info['unit']['id'],
            'place_id': loisirs_subscribe_info['place']['id'],
            'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
            'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 109
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 1

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 109
    assert data[0]['text'] == 'SPORT'
    assert len(data[0]['lignes']) == 2
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 2
    assert len({x['idIns'] for x in data[0]['lignes']}) == 2
    basket_id = data[0]['id']

    # validate basket to generate an invoice
    url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data['idInsLst']) == 2
    assert len(data['factureLst']) == 1
    assert len(data['factureLst'][0]['lineInvoiceList']) == 2
    assert data['factureLst'][0]['regie']['code'] == 109
    invoice_num = data['factureLst'][0]['numInvoice']
    invoice_id = data['factureLst'][0]['idInvoice']
    # validation empties the basket
    assert get_baskets() == []

    # get invoices paid
    url = conn + '/regie/109/invoices/history?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json() == {'data': [], 'err': 0}

    # get invoices to be paid
    url = conn + '/regie/109/invoices?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data) == 1
    assert data[0]['amount'] == '177'  # or just > 0 ?
    assert data[0]['online_payment'] is True
    assert data[0]['paid'] is False
    assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
    assert data[0]['maelis_item']['idInvoice'] == invoice_id
    assert data[0]['maelis_item']['numInvoice'] == invoice_num

    # payInvoice
    url = conn + '/regie/109/invoice/%s-%s/pay/' % (create_data['family_id'], invoice_num)
    payload = {
        'transaction_date': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
        'transaction_id': 'xxx',
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    res = resp.json()
    assert res['data'] == 'ok'

    # get invoices to be paid
    url = conn + '/regie/109/invoices?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json() == {'has_invoice_for_payment': True, 'data': [], 'err': 0}

    # get invoices paid
    url = conn + '/regie/109/invoices/history?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data) == 1
    assert data[0]['amount'] == '0'
    assert data[0]['total_amount'] == '177'  # or just > 0 ?
    assert data[0]['online_payment'] is False
    assert data[0]['paid'] is True
    assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
    assert data[0]['maelis_item']['idInvoice'] == invoice_id
    assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
||||
|
||||
|
||||
def test_payinvoice_extrasco(conn, create_data, extrasco_subscribe_info, reference_year):
    """Full payment flow on an EXTRASCO basket.

    Subscribe both children, add agenda bookings, validate the basket to
    generate an invoice, then pay the invoice and check it moves from the
    "to be paid" list to the payment history.
    """
    assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # Fetch the current family baskets; asserts the API-level err code.
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # Add a basket subscription on the EXTRASCO unit for the given person.
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'unit_id': extrasco_subscribe_info['unit']['id'],
            'place_id': extrasco_subscribe_info['place']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    def subscriptions(person_id):
        # List the person's EXTRASCO subscriptions for the reference school year.
        url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'nature': 'EXTRASCO',
            'school_year': '%s-%s' % (reference_year, reference_year + 1),
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def get_bookings(person_id):
        # Read the person's activity agenda over the unit's date range.
        url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    # no subscription
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 105
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 1

    assert len(subscriptions(create_data['bart_num'])) == 1
    assert subscriptions(create_data['maggie_num']) == []

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 105
    assert len(data[0]['lignes']) == 1
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1

    assert len({x['idIns'] for x in data[0]['lignes']}) == 1
    basket_id = data[0]['id']

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # add bookings to Bart
    slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) > 0

    # add bookings to Maggie
    # slot ids embed the person number as first ':'-separated field; rewrite
    # Bart's slot ids into Maggie's
    slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['maggie_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) > 0

    # validate basket
    url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data['idInsLst']) == 2
    assert len(data['factureLst']) == 1
    # validation empties the basket
    assert get_baskets() == []
    assert len(data['factureLst'][0]['lineInvoiceList']) == 2
    assert data['factureLst'][0]['regie']['code'] == 105
    invoice_num = data['factureLst'][0]['numInvoice']
    invoice_id = data['factureLst'][0]['idInvoice']

    # get invoices paid
    url = conn + '/regie/105/invoices/history?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json() == {'data': [], 'err': 0}

    # get invoices to be paid
    url = conn + '/regie/105/invoices?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data) == 1
    assert int(data[0]['amount']) > 0
    assert data[0]['online_payment'] is True
    assert data[0]['paid'] is False
    assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
    assert data[0]['maelis_item']['idInvoice'] == invoice_id
    assert data[0]['maelis_item']['numInvoice'] == invoice_num

    # payInvoice
    url = conn + '/regie/105/invoice/%s-%s/pay/' % (create_data['family_id'], invoice_num)
    payload = {
        'transaction_date': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
        'transaction_id': 'xxx',
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    res = resp.json()
    assert res['data'] == 'ok'

    # get invoices to be paid
    url = conn + '/regie/105/invoices?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json() == {'has_invoice_for_payment': True, 'data': [], 'err': 0}

    # get invoices history
    url = conn + '/regie/105/invoices/history?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data) == 1
    assert data[0]['amount'] == '0'
    assert int(data[0]['total_amount']) > 0
    assert data[0]['online_payment'] is False
    assert data[0]['paid'] is True
    assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
    assert data[0]['maelis_item']['idInvoice'] == invoice_id
    assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
|
@ -5,27 +5,20 @@ from .conftest import diff
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'ref',
|
||||
"ref",
|
||||
[
|
||||
'ape-indicators',
|
||||
'category',
|
||||
'child-indicator',
|
||||
'civility',
|
||||
'country',
|
||||
'county',
|
||||
'csp',
|
||||
'dietcode',
|
||||
'document',
|
||||
'exemption-reasons',
|
||||
#'nursery',
|
||||
'organ',
|
||||
'pai',
|
||||
'quality',
|
||||
'quotient',
|
||||
#'regie',
|
||||
'rl-indicator',
|
||||
'school-levels',
|
||||
'school-years',
|
||||
'situation',
|
||||
'street',
|
||||
'vaccin',
|
||||
|
@ -41,5 +34,4 @@ def test_referentials(conn, referentials, ref):
|
|||
for item in res['data']:
|
||||
assert 'id' in item
|
||||
assert 'text' in item
|
||||
if ref not in ['street', 'county', 'nursery']:
|
||||
assert diff(res['data'], 'test_read_%s_list.json' % ref)
|
||||
assert diff(res['data'], 'test_read_%s_list.json' % ref)
|
|
@ -9,33 +9,21 @@ from .conftest import diff, diff_child, diff_family, diff_rlg, link, read_family
|
|||
FAMILY_RESET_PAYLOAD = {
|
||||
'category': 'AUTR',
|
||||
'situation': 'AUTR',
|
||||
'nbChild': '',
|
||||
'nbTotalChild': '',
|
||||
'nbAES': '',
|
||||
'rl1': {
|
||||
'civility': 'MR', # no effect
|
||||
'firstname': 'Marge', # must be
|
||||
'lastname': 'Test_Simpson', # must be
|
||||
'lastname': 'Simpson', # must be
|
||||
'maidenName': 'reset', # no effect
|
||||
'quality': 'AU',
|
||||
'birth': {
|
||||
'dateBirth': '1950-10-01', # must be
|
||||
'countryCode': '',
|
||||
},
|
||||
'birth': {'dateBirth': '1950-10-01'}, # must be
|
||||
'adresse': {'idStreet': '', 'street1': 'reset', 'town': 'reset', 'zipcode': 'reset'},
|
||||
},
|
||||
'rl2': {
|
||||
'civility': 'MME', # no effect
|
||||
'firstname': 'Homer', # must be
|
||||
'lastname': 'Test_Simpson', # must be
|
||||
'lastname': 'Simpson', # must be
|
||||
'quality': 'AU',
|
||||
'birth': {
|
||||
'dateBirth': '1956-05-12', # must be
|
||||
'place': '',
|
||||
'communeCode': '',
|
||||
'cdDepartment': '',
|
||||
'countryCode': '404',
|
||||
},
|
||||
'birth': {'dateBirth': '1956-05-12'}, # must be
|
||||
'adresse': {
|
||||
'num': '42',
|
||||
'numComp': 'Q',
|
||||
|
@ -80,13 +68,7 @@ FAMILY_RESET_PAYLOAD = {
|
|||
'sexe': 'F',
|
||||
'firstname': 'Bartolome', # some side effects, cf test_update_child
|
||||
'lastname': 'Simps',
|
||||
'birth': {
|
||||
'dateBirth': '1970-01-01',
|
||||
'place': '',
|
||||
'communeCode': '',
|
||||
'cdDepartment': '',
|
||||
'countryCode': '404',
|
||||
},
|
||||
'birth': {'dateBirth': '1970-01-01'},
|
||||
'bPhoto': False,
|
||||
'bLeaveAlone': False,
|
||||
'dietcode': '',
|
||||
|
@ -236,24 +218,19 @@ def test_update_family(conn, update_data):
|
|||
|
||||
def test_create_family(conn, create_data, update_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
|
||||
# search the 'Test_Simpson' default test family
|
||||
resp = requests.get(conn + '/search-family?q=Test_Simpson')
|
||||
resp.raise_for_status()
|
||||
assert len(resp.json()['data']) >= 1
|
||||
assert any(data['RL1']['lastname'] == 'TEST_SIMPSON' for data in resp.json()['data'])
|
||||
|
||||
link(conn, create_data)
|
||||
url = conn + '/create-family?NameID=%s' % create_data['name_id']
|
||||
|
||||
# RL1 already exists (on update_data) error
|
||||
unlink(conn, create_data['name_id'])
|
||||
payload = copy.deepcopy(create_data['family_payload'])
|
||||
payload['rl1']['lastname'] = 'Test_Simpson'
|
||||
payload['rl1']['lastname'] = 'Simpson'
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 1
|
||||
assert 'Il existe déjà un Responsable Légal correspondant' in res['err_desc']
|
||||
assert res['err_class'] == 'passerelle.utils.jsonresponse.APIError'
|
||||
assert res['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
|
||||
# RL1 already exists (on update_data, as RL2) error
|
||||
payload['rl1']['firstname'] = 'Homer'
|
||||
|
@ -263,12 +240,12 @@ def test_create_family(conn, create_data, update_data):
|
|||
res = resp.json()
|
||||
assert res['err'] == 1
|
||||
assert 'Il existe déjà un Responsable Légal correspondant' in res['err_desc']
|
||||
assert res['err_class'] == 'passerelle.utils.jsonresponse.APIError'
|
||||
assert res['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
|
||||
|
||||
def test_is_rl_exists(conn, update_data):
|
||||
url = conn + '/is-rl-exists'
|
||||
payload = {'firstname': 'Marge', 'lastname': 'Test_Simpson', 'dateBirth': '1950-10-01'}
|
||||
payload = {'firstname': 'Marge', 'lastname': 'Simpson', 'dateBirth': '1950-10-01'}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'err': 0, 'data': True}
|
||||
|
@ -285,7 +262,7 @@ def test_is_rl_exists(conn, update_data):
|
|||
assert resp.json() == {'err': 0, 'data': False}
|
||||
|
||||
# test on rl2
|
||||
payload = {'firstname': 'Homer', 'lastname': 'Test_Simpson', 'dateBirth': '1956-05-12'}
|
||||
payload = {'firstname': 'Homer', 'lastname': 'Simpson', 'dateBirth': '1956-05-12'}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'err': 0, 'data': True}
|
||||
|
@ -309,7 +286,7 @@ def test_create_rl2(conn, create_data, update_data):
|
|||
assert diff_rlg(conn, create_data['name_id'], 2, 'test_create_rl2.json')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('rl', ['1', '2'])
|
||||
@pytest.mark.parametrize("rl", ['1', '2'])
|
||||
def test_update_rlg(conn, update_data, rl):
|
||||
rlg = 'rl' + rl
|
||||
RLG = 'RL' + rl
|
||||
|
@ -370,7 +347,7 @@ def test_update_rlg(conn, update_data, rl):
|
|||
in res['err_desc']
|
||||
)
|
||||
else:
|
||||
assert 'La date de naissance ne peut pas être modifiée' in res['err_desc']
|
||||
assert "La date de naissance ne peut pas être modifiée" in res['err_desc']
|
||||
|
||||
# restore RL1
|
||||
payload = copy.deepcopy(update_data['family_payload'][rlg])
|
||||
|
@ -459,7 +436,7 @@ def test_create_child(conn, create_data, update_data):
|
|||
assert 'E65 : Il existe déjà un enfant correspondant' in res['err_desc']
|
||||
|
||||
# child already exists error (Lisa form update_data)
|
||||
payload['lastname'] = 'Test_Simpson'
|
||||
payload['lastname'] = 'Simpson'
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
@ -618,24 +595,6 @@ def test_update_child_medical_record(conn, update_data):
|
|||
update_data['bart_num'],
|
||||
)
|
||||
|
||||
# update only doctor
|
||||
# #2720: allergies comments, and observations are erased
|
||||
payload = {
|
||||
'familyDoctor': {
|
||||
'name': 'Hibbert',
|
||||
'phone': '0656785678',
|
||||
'address': {
|
||||
'street1': 'General Hospital',
|
||||
'zipcode': '90701',
|
||||
'town': 'Springfield',
|
||||
},
|
||||
},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert diff_child(conn, update_data['name_id'], 0, 'test_update_child_doctor.json', key='medicalRecord')
|
||||
|
||||
# reset medical record
|
||||
payload = FAMILY_RESET_PAYLOAD['childList'][0]['medicalRecord']
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -799,22 +758,21 @@ def test_update_quotient(conn, create_data):
|
|||
'dateStart': '2022-01-01',
|
||||
'dateEnd': '2022-12-31',
|
||||
'mtt': '1500.33',
|
||||
'cdquo': '2',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = read_family(conn, create_data['name_id'])
|
||||
assert len(data['RL1']['quotientList']) == 2
|
||||
assert data['RL1']['quotients']['2'] == [
|
||||
assert data['RL1']['quotientList'] == [
|
||||
{
|
||||
'yearRev': 2021,
|
||||
'dateStart': '2022-01-01T00:00:00+01:00',
|
||||
'dateEnd': '2022-12-31T00:00:00+01:00',
|
||||
'mtt': 1500.33,
|
||||
'cdquo': '2',
|
||||
'cdquo': '1',
|
||||
'codeUti': None,
|
||||
'cdquo_text': 'Revenus Petite enfance',
|
||||
'cdquo_text': 'Revenus fiscaux',
|
||||
}
|
||||
]
|
||||
|
||||
|
@ -824,7 +782,7 @@ def test_update_quotient(conn, create_data):
|
|||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = read_family(conn, create_data['name_id'])
|
||||
assert len(data['RL1']['quotients']['2']) == 2
|
||||
assert len(data['RL1']['quotientList']) == 2
|
||||
|
||||
# add quotient on another income year
|
||||
payload['yearRev'] = '2020'
|
||||
|
@ -832,7 +790,7 @@ def test_update_quotient(conn, create_data):
|
|||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = diff_rlg(conn, create_data['name_id'], 1, 'test_update_quotient.json', 'quotientList')
|
||||
assert len(data['RL1']['quotients']['2']) == 3
|
||||
assert len(data['RL1']['quotientList']) == 3
|
||||
|
||||
# test read-family with reference year
|
||||
url = conn + '/read-family?NameID=%s&income_year=%s' % (create_data['name_id'], '2020')
|
||||
|
@ -932,7 +890,7 @@ def test_read_family_members(conn, update_data):
|
|||
assert res['data']['personInfo']['firstname'] == 'ABRAHAM JEBEDIAH'
|
||||
|
||||
|
||||
def test_supplied_document(conn, create_data):
|
||||
def test_add_supplied_document(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
|
@ -940,8 +898,6 @@ def test_supplied_document(conn, create_data):
|
|||
payload = {
|
||||
'documentList/0/code': '46',
|
||||
'documentList/0/depositDate': '2022-12-20',
|
||||
'documentList/0/visaDate': '2022-12-21',
|
||||
'documentList/0/validityDate': '2022-12-22',
|
||||
'documentList/0/file': { # w.c.s. file field
|
||||
'filename': '201x201.jpg',
|
||||
'content_type': 'image/jpeg',
|
||||
|
@ -955,7 +911,6 @@ def test_supplied_document(conn, create_data):
|
|||
assert res['err'] == 0
|
||||
|
||||
# push on RL
|
||||
payload['documentList/0/code'] = '85'
|
||||
payload['numPerson'] = create_data['rl1_num']
|
||||
url = conn + '/add-supplied-document?NameID=%s' % create_data['name_id']
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -963,44 +918,10 @@ def test_supplied_document(conn, create_data):
|
|||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# push on child
|
||||
payload['documentList/0/code'] = '69'
|
||||
# push on childe
|
||||
payload['numPerson'] = create_data['bart_num']
|
||||
url = conn + '/add-supplied-document?NameID=%s' % create_data['name_id']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on family
|
||||
params = {
|
||||
'code': '46',
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
url = conn + '/read-supplied-document-validity?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on RL
|
||||
params = {
|
||||
'code': '85',
|
||||
'person_id': create_data['rl1_num'],
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on child
|
||||
params = {
|
||||
'code': '69',
|
||||
'person_id': create_data['bart_num'],
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
|
@ -0,0 +1,36 @@
|
|||
import requests
|
||||
|
||||
from .conftest import diff, link, unlink
|
||||
|
||||
|
||||
def test_direct_debit_order(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'codeRegie': '1',
|
||||
'bank/bankBIC': 'BDFEFR2T',
|
||||
'bank/bankIBAN': 'FR7630001007941234567890185',
|
||||
'bank/bankRUM': 'xxx',
|
||||
'bank/dateStart': '2023-01-01',
|
||||
'bank/bankAddress': '75049 PARIS cedex 01',
|
||||
'bank/civility': 'x',
|
||||
'bank/lastName': 'Ewing',
|
||||
'bank/firstName': 'John Ross',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
url = conn + '/get-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'codeRegie': '1',
|
||||
'dateRef': '2023-01-01',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
res['data']['numPerson'] = 'N/A'
|
||||
assert diff(res['data'], 'test_get_rl1_direct_debit_order.json')
|
|
@ -36,7 +36,7 @@ def test_link(conn, update_data):
|
|||
res = resp.json()
|
||||
assert res['err'] == 1
|
||||
assert res['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
assert 'E02 : Le dossier numéro [999999] ne correspond à aucune famille' in res['err_desc']
|
||||
assert "E02 : Le dossier numéro [999999] ne correspond à aucune famille" in res['err_desc']
|
||||
|
||||
# wrong DUI firstname
|
||||
payload = {
|
|
@ -2,9 +2,9 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption('--url', help='Url of a passerelle Vivaticket connector instance')
|
||||
parser.addoption("--url", help="Url of a passerelle Vivaticket connector instance")
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption('--url')
|
||||
return request.config.getoption("--url")
|
||||
|
|
|
@ -6,7 +6,7 @@ import requests
|
|||
|
||||
|
||||
def call_generic(conn, endpoint):
|
||||
print('%s \n' % endpoint)
|
||||
print("%s \n" % endpoint)
|
||||
url = conn + '/%s' % endpoint
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -50,7 +50,7 @@ def test_book_event(conn):
|
|||
themes = call_generic(conn, 'themes')
|
||||
random.shuffle(themes)
|
||||
payload['theme'] = themes[0]['id']
|
||||
print('Creating booking with the following payload:\n%s' % payload)
|
||||
print("Creating booking with the following payload:\n%s" % payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
#!/bin/sh -ue
|
||||
|
||||
test -d wcs || git clone https://git.entrouvert.org/wcs.git
|
||||
(cd wcs && git pull)
|
|
@ -2,8 +2,8 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == '__main__':
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'passerelle.settings')
|
||||
if __name__ == "__main__":
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passerelle.settings")
|
||||
|
||||
from django.core.management import execute_from_command_line
|
||||
|
||||
|
|
|
@ -102,7 +102,6 @@ class AddressResource(BaseResource):
|
|||
@endpoint(
|
||||
name='sectors',
|
||||
description=_('List related Sectorizations'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Sector Identifier (slug)')},
|
||||
'q': {'description': _('Filter by Sector Title or Identifier')},
|
||||
|
|
|
@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0006_resourcestatus'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('actesweb', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -48,7 +48,7 @@ class ActesWeb(BaseResource):
|
|||
def basepath(self):
|
||||
return os.path.join(default_storage.path('actesweb'), self.slug)
|
||||
|
||||
@endpoint(methods=['post'], description=_('Create demand'))
|
||||
@endpoint(perm='can_access', methods=['post'], description=_('Create demand'))
|
||||
def create(self, request, *args, **kwargs):
|
||||
try:
|
||||
payload = json.loads(request.body)
|
||||
|
|
|
@ -1,77 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-07-07 10:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AdullactPastell',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'basic_auth_username',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication username'
|
||||
),
|
||||
),
|
||||
(
|
||||
'basic_auth_password',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication password'
|
||||
),
|
||||
),
|
||||
(
|
||||
'client_certificate',
|
||||
models.FileField(
|
||||
blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
|
||||
),
|
||||
),
|
||||
(
|
||||
'trusted_certificate_authorities',
|
||||
models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'),
|
||||
),
|
||||
(
|
||||
'verify_cert',
|
||||
models.BooleanField(blank=True, default=True, verbose_name='TLS verify certificates'),
|
||||
),
|
||||
(
|
||||
'http_proxy',
|
||||
models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
|
||||
),
|
||||
(
|
||||
'api_base_url',
|
||||
models.URLField(
|
||||
help_text='Example: https://pastell.example.com/api/v2/',
|
||||
max_length=128,
|
||||
verbose_name='API base URL',
|
||||
),
|
||||
),
|
||||
('token', models.CharField(blank=True, max_length=128, verbose_name='API token')),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_adullact_pastell_adullactpastell_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Adullact Pastell',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -1,265 +0,0 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import base64
|
||||
from urllib import parse as urlparse
|
||||
|
||||
import requests
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from django.http import HttpResponse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource, HTTPResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
FILE_OBJECT_PROPERTIES = {
|
||||
'title': _('File object'),
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename'),
|
||||
},
|
||||
'content': {
|
||||
'type': 'string',
|
||||
'description': _('Content'),
|
||||
},
|
||||
'content_type': {
|
||||
'type': 'string',
|
||||
'description': _('Content type'),
|
||||
},
|
||||
},
|
||||
'required': ['filename', 'content'],
|
||||
}
|
||||
|
||||
|
||||
DOCUMENT_CREATION_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['type'],
|
||||
'additionalProperties': True,
|
||||
'properties': {
|
||||
'type': {'type': 'string', 'description': _('Document type')},
|
||||
'file_field_name': {
|
||||
'type': 'string',
|
||||
'description': _('Document file\'s field name'),
|
||||
},
|
||||
'file': FILE_OBJECT_PROPERTIES,
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename (takes precedence over filename in "file" object)'),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
DOCUMENT_FILE_UPLOAD_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['file', 'file_field_name'],
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename (takes precedence over filename in "file" object)'),
|
||||
},
|
||||
'file': FILE_OBJECT_PROPERTIES,
|
||||
'file_field_name': {
|
||||
'type': 'string',
|
||||
'description': _('Document file\'s field name'),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class AdullactPastell(BaseResource, HTTPResource):
|
||||
api_base_url = models.URLField(
|
||||
max_length=128,
|
||||
verbose_name=_('API base URL'),
|
||||
help_text=_('Example: https://pastell.example.com/api/v2/'),
|
||||
)
|
||||
token = models.CharField(max_length=128, blank=True, verbose_name=_('API token'))
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Adullact Pastell')
|
||||
|
||||
def clean(self, *args, **kwargs):
|
||||
if not self.token and not self.basic_auth_username:
|
||||
raise ValidationError(_('API token or authentication username and password should be defined.'))
|
||||
return super().clean(*args, **kwargs)
|
||||
|
||||
def call(self, path, method='get', params=None, **kwargs):
|
||||
url = urlparse.urljoin(self.api_base_url, path)
|
||||
if self.token:
|
||||
kwargs.update({'headers': {'Authorization': 'Bearer: %s' % self.token}, 'auth': None})
|
||||
try:
|
||||
response = self.requests.request(url=url, method=method, params=params, **kwargs)
|
||||
response.raise_for_status()
|
||||
except (requests.Timeout, requests.RequestException) as e:
|
||||
raise APIError(str(e))
|
||||
return response
|
||||
|
||||
def check_status(self):
|
||||
try:
|
||||
response = self.call('version')
|
||||
except APIError as e:
|
||||
raise Exception('Pastell server is down: %s' % e)
|
||||
return {'data': response.json()}
|
||||
|
||||
def upload_file(self, entity_id, document_id, file_field_name, data, **kwargs):
|
||||
filename = kwargs.get('filename') or data['filename']
|
||||
file_data = {
|
||||
'file_content': (
|
||||
filename,
|
||||
base64.b64decode(data['content']),
|
||||
data.get('content_type'),
|
||||
)
|
||||
}
|
||||
|
||||
return self.call(
|
||||
'entite/%s/document/%s/file/%s' % (entity_id, document_id, file_field_name),
|
||||
'post',
|
||||
files=file_data,
|
||||
data={'file_name': filename},
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
description=_('List entities'),
|
||||
datasource=True,
|
||||
)
|
||||
def entities(self, request):
|
||||
data = []
|
||||
response = self.call('entite')
|
||||
for item in response.json():
|
||||
item['id'] = item['id_e']
|
||||
item['text'] = item['denomination']
|
||||
data.append(item)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
description=_('List entity documents'),
|
||||
parameters={'entity_id': {'description': _('Entity ID'), 'example_value': '42'}},
|
||||
datasource=True,
|
||||
)
|
||||
def documents(self, request, entity_id):
|
||||
if request.GET.get('id'):
|
||||
response = self.call('entite/%s/document/%s' % (entity_id, request.GET['id']))
|
||||
return {'data': response.json()}
|
||||
|
||||
data = []
|
||||
response = self.call('entite/%s/document' % entity_id)
|
||||
for item in response.json():
|
||||
item['id'] = item['id_d']
|
||||
item['text'] = item['titre']
|
||||
data.append(item)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Create a document for an entity'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_CREATION_SCHEMA}},
|
||||
},
|
||||
name='create-document',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
},
|
||||
)
|
||||
def create_document(self, request, entity_id, post_data):
|
||||
file_data = post_data.pop('file', None)
|
||||
file_field_name = post_data.pop('file_field_name', None)
|
||||
|
||||
# create document
|
||||
response = self.call('entite/%s/document' % entity_id, 'post', params=post_data)
|
||||
document_id = response.json()['id_d']
|
||||
|
||||
# update it with other attributes
|
||||
response = self.call('entite/%s/document/%s' % (entity_id, document_id), 'patch', params=post_data)
|
||||
|
||||
# upload file if it's filled
|
||||
if file_field_name and file_data:
|
||||
self.upload_file(entity_id, document_id, file_field_name, file_data, **post_data)
|
||||
|
||||
return {'data': response.json()}
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Upload a file to a document'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_FILE_UPLOAD_SCHEMA}},
|
||||
},
|
||||
name='upload-document-file',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
},
|
||||
)
|
||||
def upload_document_file(self, request, entity_id, document_id, post_data):
|
||||
file_field_name = post_data.pop('file_field_name')
|
||||
file_data = post_data.pop('file')
|
||||
response = self.upload_file(entity_id, document_id, file_field_name, file_data, **post_data)
|
||||
return {'data': response.json()}
|
||||
|
||||
@endpoint(
|
||||
description=_('Get document\'s file'),
|
||||
name='get-document-file',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
'field_name': {
|
||||
'description': _('Document file\'s field name'),
|
||||
'example_value': 'document',
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_document_file(self, request, entity_id, document_id, field_name):
|
||||
document = self.call('entite/%s/document/%s/file/%s' % (entity_id, document_id, field_name))
|
||||
response = HttpResponse(document.content, content_type=document.headers['Content-Type'])
|
||||
response['Content-Disposition'] = document.headers['Content-disposition']
|
||||
return response
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Run action on document'),
|
||||
'request_body': {
|
||||
'schema': {
|
||||
'application/json': {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['action_name'],
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'action_name': {'type': 'string', 'description': _('Action name')},
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
name='run-document-action',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
},
|
||||
)
|
||||
def run_document_action(self, request, entity_id, document_id, post_data):
|
||||
response = self.call(
|
||||
'entite/%s/document/%s/action/%s' % (entity_id, document_id, post_data['action_name']), 'post'
|
||||
)
|
||||
return {'data': response.json()}
|
|
@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0005_resourcelog'),
|
||||
]
|
||||
|
|
|
@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('airquality', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('airquality', '0002_auto_20170920_0951'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('airquality', '0003_remove_airquality_log_level'),
|
||||
]
|
||||
|
|
|
@ -44,7 +44,6 @@ class AirQuality(BaseResource):
|
|||
@endpoint(
|
||||
pattern=r'^(?P<country>\w+)/(?P<city>\w+)/$',
|
||||
example_pattern='{country}/{city}/',
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'country': {'description': _('Country Code'), 'example_value': 'fr'},
|
||||
'city': {'description': _('City Name'), 'example_value': 'lyon'},
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_entreprise', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -14,6 +14,7 @@ def remove_url_path(apps, schema_editor):
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_entreprise', '0002_auto_20190701_1357'),
|
||||
]
|
||||
|
|
|
@ -185,6 +185,7 @@ class APIEntreprise(BaseResource):
|
|||
METHOD_PARAM = {'description': _('method used for user identity matching'), 'example_value': 'simple'}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<association_id>\w+)/$',
|
||||
example_pattern='{association_id}/',
|
||||
description=_('Get association\'s documents'),
|
||||
|
@ -288,6 +289,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': document}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s data from Infogreffe'),
|
||||
|
@ -303,6 +305,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': raw_data['data']}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<association_id>\w+)/$',
|
||||
example_pattern='{association_id}/',
|
||||
description=_('Get association\'s related informations'),
|
||||
|
@ -321,6 +324,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s related informations'),
|
||||
|
@ -371,7 +375,6 @@ class APIEntreprise(BaseResource):
|
|||
'v3/infogreffe/rcs/unites_legales/%s/mandataires_sociaux' % siren, raw=True, **kwargs
|
||||
).get('data')
|
||||
for mandataire in mandataires_data:
|
||||
mandataire = mandataire.get('data', {})
|
||||
for key in ('nom', 'prenom', 'fonction'):
|
||||
if key not in mandataire:
|
||||
mandataire[key] = ''
|
||||
|
@ -381,6 +384,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': {'entreprise': data, 'etablissement_siege': siege_data}}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
methods=['get'],
|
||||
pattern=r'(?P<siret>\w+)/$',
|
||||
example_pattern='{siret}/',
|
||||
|
@ -415,6 +419,7 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
methods=['get'],
|
||||
pattern=r'(?P<siret>\w+)/$',
|
||||
example_pattern='{siret}/',
|
||||
|
@ -427,9 +432,67 @@ class APIEntreprise(BaseResource):
|
|||
},
|
||||
)
|
||||
def exercices(self, request, siret, **kwargs):
|
||||
return self.get('v3/dgfip/etablissements/%s/chiffres_affaires' % siret, raw=True, **kwargs)
|
||||
return self.get('v2/exercices/%s/' % siret, **kwargs)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s annual workforce data'),
|
||||
parameters={
|
||||
'siren': SIREN_PARAM,
|
||||
'object': OBJECT_PARAM,
|
||||
'context': CONTEXT_PARAM,
|
||||
'recipient': RECIPIENT_PARAM,
|
||||
},
|
||||
)
|
||||
def effectifs_annuels_acoss_covid(self, request, siren, **kwargs):
|
||||
if len(siren) != 9:
|
||||
raise APIError(_('invalid SIREN length (must be 9 characters)'))
|
||||
return self.get('v2/effectifs_annuels_acoss_covid/%s/' % siren, **kwargs)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<year>\w+)/(?P<month>\w+)/(?P<siren>\w+)/$',
|
||||
description=_('Get firm\'s monthly workforce data, by SIREN'),
|
||||
parameters={
|
||||
'year': YEAR_PARAM,
|
||||
'month': MONTH_PARAM,
|
||||
'siren': SIREN_PARAM,
|
||||
'object': OBJECT_PARAM,
|
||||
'context': CONTEXT_PARAM,
|
||||
'recipient': RECIPIENT_PARAM,
|
||||
},
|
||||
)
|
||||
def entreprise_effectifs_mensuels_acoss_covid(self, request, year, month, siren, **kwargs):
|
||||
if len(siren) != 9:
|
||||
raise APIError(_('invalid SIREN length (must be 9 characters)'))
|
||||
month = month.zfill(2)
|
||||
return self.get(
|
||||
'v2/effectifs_mensuels_acoss_covid/%s/%s/entreprise/%s/' % (year, month, siren), **kwargs
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<year>\w+)/(?P<month>\w+)/(?P<siret>\w+)/$',
|
||||
description=_('Get firm\'s monthly workforce data, by SIRET'),
|
||||
parameters={
|
||||
'year': YEAR_PARAM,
|
||||
'month': MONTH_PARAM,
|
||||
'siret': SIRET_PARAM,
|
||||
'object': OBJECT_PARAM,
|
||||
'context': CONTEXT_PARAM,
|
||||
'recipient': RECIPIENT_PARAM,
|
||||
},
|
||||
)
|
||||
def etablissement_effectifs_mensuels_acoss_covid(self, request, year, month, siret, **kwargs):
|
||||
month = month.zfill(2)
|
||||
return self.get(
|
||||
'v2/effectifs_mensuels_acoss_covid/%s/%s/etablissement/%s/' % (year, month, siret), **kwargs
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
description=_(
|
||||
'Match firm\'s society representative against local FranceConnect identity information'
|
||||
|
@ -448,18 +511,18 @@ class APIEntreprise(BaseResource):
|
|||
def match_mandataire_social(
|
||||
self, request, siren, first_name, last_name, birthdate, method='simple', **kwargs
|
||||
):
|
||||
mandataires = self.get(
|
||||
'v3/infogreffe/rcs/unites_legales/%s/mandataires_sociaux' % siren, raw=True, **kwargs
|
||||
).get('data', [])
|
||||
|
||||
entreprise = self.get(
|
||||
'v2/entreprises/%s/' % siren,
|
||||
raw=True,
|
||||
**kwargs,
|
||||
)
|
||||
methods = {
|
||||
'simple': simple_match,
|
||||
'levenshtein': levenshtein_match,
|
||||
}
|
||||
if method not in methods:
|
||||
return {'err': 1, 'err_desc': 'method %s not implemented' % method}
|
||||
for mandataire in mandataires:
|
||||
mandataire = mandataire.get('data', {})
|
||||
for mandataire in entreprise.get('entreprise', {}).get('mandataires_sociaux', []):
|
||||
if methods[method](mandataire, first_name, last_name, birthdate):
|
||||
return {'err': 0, 'data': mandataire}
|
||||
return {'err': 0, 'data': {}}
|
||||
|
|
|
@ -27,7 +27,7 @@ def normalize(s):
|
|||
def simple_match(mandataire, first_name, last_name, birthdate):
|
||||
if any([attr not in mandataire for attr in ['prenom', 'nom', 'date_naissance']]):
|
||||
return False
|
||||
if normalize(mandataire['prenom'].replace(',', ' ').split(maxsplit=1)[0]) != normalize(first_name):
|
||||
if normalize(mandataire['prenom'].split(',')[0]) != normalize(first_name):
|
||||
return False
|
||||
if normalize(mandataire['nom']) != normalize(last_name):
|
||||
return False
|
||||
|
@ -41,12 +41,7 @@ def levenshtein_match(mandataire, first_name, last_name, birthdate):
|
|||
return False
|
||||
dist_first_name = min(2, int(len(first_name) / 4))
|
||||
dist_last_name = min(2, int(len(last_name) / 4))
|
||||
if (
|
||||
ldistance(
|
||||
normalize(mandataire['prenom'].replace(',', ' ').split(maxsplit=1)[0]), normalize(first_name)
|
||||
)
|
||||
> dist_first_name
|
||||
):
|
||||
if ldistance(normalize(mandataire['prenom'].split(',')[0]), normalize(first_name)) > dist_first_name:
|
||||
return False
|
||||
if ldistance(normalize(mandataire['nom']), normalize(last_name)) > dist_last_name:
|
||||
return False
|
||||
|
|
|
@ -1,56 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-04-14 17:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Resource',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'api_url',
|
||||
models.URLField(
|
||||
default='https://gw.dgfip.finances.gouv.fr/impotparticulier/1.0',
|
||||
max_length=256,
|
||||
verbose_name='DGFIP API base URL',
|
||||
),
|
||||
),
|
||||
('oauth_username', models.CharField(max_length=128, verbose_name='DGFIP API Username')),
|
||||
('oauth_password', models.CharField(max_length=128, verbose_name='DGFIP API Password')),
|
||||
(
|
||||
'oauth_scopes',
|
||||
models.CharField(max_length=128, verbose_name='DGFIP API Scopes', blank=True),
|
||||
),
|
||||
(
|
||||
'id_teleservice',
|
||||
models.TextField(max_length=128, verbose_name='DGFIP API ID_Teleservice', blank=True),
|
||||
),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_api_impot_particulier_resource_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'API Impot Particulier',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -1,22 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-05-25 09:49
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('api_impot_particulier', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='resource',
|
||||
name='id_teleservice',
|
||||
field=models.TextField(max_length=128, verbose_name='DGFIP API ID_Teleservice'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='resource',
|
||||
name='oauth_scopes',
|
||||
field=models.CharField(max_length=128, verbose_name='DGFIP API Scopes'),
|
||||
),
|
||||
]
|
|
@ -1,306 +0,0 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
import uuid
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import requests
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
from passerelle.utils.timeout import Timeout
|
||||
|
||||
|
||||
class ServiceIsDown(APIError):
|
||||
def __init__(self):
|
||||
super().__init__(_('API Impot Particulier service is unavailable'))
|
||||
|
||||
def __str__(self):
|
||||
if self.__context__:
|
||||
return f'{super().__str__()}: {self.__context__}'
|
||||
return super().__str__()
|
||||
|
||||
|
||||
class Resource(BaseResource):
|
||||
api_url = models.URLField(
|
||||
_('DGFIP API base URL'),
|
||||
max_length=256,
|
||||
default='https://gw.dgfip.finances.gouv.fr/impotparticulier/1.0',
|
||||
)
|
||||
oauth_username = models.CharField(_('DGFIP API Username'), max_length=128)
|
||||
oauth_password = models.CharField(_('DGFIP API Password'), max_length=128)
|
||||
oauth_scopes = models.CharField(_('DGFIP API Scopes'), max_length=128)
|
||||
id_teleservice = models.TextField(_('DGFIP API ID_Teleservice'), max_length=128)
|
||||
|
||||
log_requests_errors = False
|
||||
requests_timeout = 30
|
||||
requests_max_retries = {
|
||||
'total': 3,
|
||||
'backoff_factor': 0.5,
|
||||
'allowed_methods': ['GET', 'POST'],
|
||||
# retry after: 0.5, 1.5 and 3.5 seconds
|
||||
'status_forcelist': [413, 429, 503, 504],
|
||||
}
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('API Impot Particulier')
|
||||
|
||||
@classmethod
|
||||
def parse_numero_fiscal(cls, value):
|
||||
value = value.strip().replace(' ', '')
|
||||
if not (value and value.isascii() and value.isdigit()):
|
||||
raise APIError(_('invalid numero_fiscal'))
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def parse_annee_de_revenu(cls, value):
|
||||
try:
|
||||
value = int(value)
|
||||
except (TypeError, ValueError):
|
||||
raise APIError(_('invalid annee_de_revenu'))
|
||||
today = datetime.date.today()
|
||||
if not (0 < today.year - value < 10):
|
||||
raise APIError(_('invalid annee_de_revenu'))
|
||||
return value
|
||||
|
||||
@endpoint(
|
||||
name='spi-situations-ir-assiettes-annrev',
|
||||
description=_('Provides revenue tax situation for a specific year.'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
'description': _('Tax number of the person'),
|
||||
},
|
||||
'annee_de_revenu': {
|
||||
'description': _('Income year'),
|
||||
},
|
||||
},
|
||||
)
|
||||
def spi_situations_ir_assiettes_annrev(self, request, numero_fiscal, annee_de_revenu):
|
||||
numero_fiscal = self.parse_numero_fiscal(numero_fiscal)
|
||||
annee_de_revenu = self.parse_annee_de_revenu(annee_de_revenu)
|
||||
return {
|
||||
'data': self.get_spi_situations_ir_assiettes_annrev(
|
||||
numero_fiscal=numero_fiscal, annee_de_revenu=annee_de_revenu, timeout=Timeout(20)
|
||||
)
|
||||
}
|
||||
|
||||
def get_spi_situations_ir_assiettes_annrev(self, numero_fiscal, annee_de_revenu, timeout=None):
|
||||
return self.call(
|
||||
name='spi-situations-ir-assiettes-deuxans',
|
||||
endpoint_template='spi/{spi}/situations/ir/assiettes/annrev/{annrev}',
|
||||
timeout=timeout,
|
||||
spi=numero_fiscal,
|
||||
annrev=annee_de_revenu,
|
||||
accept='application/prs.dgfip.part.situations.ir.assiettes.v1+json',
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
name='spi-situations-th-assiettes-principale-annrev',
|
||||
description=_('Provides housing tax situation for a specific year.'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
'description': _('Tax number of the person'),
|
||||
},
|
||||
'annee_de_revenu': {
|
||||
'description': _('Income year'),
|
||||
},
|
||||
},
|
||||
)
|
||||
def spi_situations_th_assiettes_principale_annrev(self, request, numero_fiscal, annee_de_revenu):
|
||||
numero_fiscal = self.parse_numero_fiscal(numero_fiscal)
|
||||
annee_de_revenu = self.parse_annee_de_revenu(annee_de_revenu)
|
||||
return {
|
||||
'data': self.get_spi_situations_th_assiettes_principale_annrev(
|
||||
numero_fiscal=numero_fiscal, annee_de_revenu=annee_de_revenu, timeout=Timeout(20)
|
||||
)
|
||||
}
|
||||
|
||||
def get_spi_situations_th_assiettes_principale_annrev(self, numero_fiscal, annee_de_revenu, timeout=None):
|
||||
return self.call(
|
||||
name='spi-situations-th-assiettes-principale-deuxans',
|
||||
endpoint_template='spi/{spi}/situations/th/assiettes/principale/annrev/{annrev}',
|
||||
timeout=timeout,
|
||||
spi=numero_fiscal,
|
||||
annrev=annee_de_revenu,
|
||||
accept='application/prs.dgfip.part.situations.th.assiettes.v1+json',
|
||||
)
|
||||
|
||||
def call(self, name, endpoint_template, timeout=None, **kwargs):
|
||||
correlation_id = str(uuid.uuid4().hex)
|
||||
kwargs_formatted = ', '.join(f'{key}={value}' for key, value in kwargs.items())
|
||||
try:
|
||||
data = self.get_tax_data(
|
||||
session=self.requests,
|
||||
base_url=self.api_url,
|
||||
access_token=self._get_access_token(timeout=timeout),
|
||||
correlation_id=correlation_id,
|
||||
endpoint_template=endpoint_template,
|
||||
id_teleservice=self.id_teleservice,
|
||||
timeout=timeout,
|
||||
**kwargs,
|
||||
)
|
||||
except ServiceIsDown as e:
|
||||
self.logger.warning(
|
||||
'%s(%s) failed: %s',
|
||||
name,
|
||||
kwargs_formatted,
|
||||
e,
|
||||
extra={
|
||||
'correlation_id': correlation_id,
|
||||
'id_teleservice': self.id_teleservice,
|
||||
'kwargs': kwargs,
|
||||
},
|
||||
)
|
||||
raise
|
||||
else:
|
||||
self.logger.warning(
|
||||
'%s(%s) success',
|
||||
name,
|
||||
kwargs_formatted,
|
||||
extra={
|
||||
'data': data,
|
||||
'correlation_id': correlation_id,
|
||||
'id_teleservice': self.id_teleservice,
|
||||
'kwargs': kwargs,
|
||||
},
|
||||
)
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def get_tax_data(
|
||||
cls,
|
||||
session,
|
||||
base_url,
|
||||
access_token,
|
||||
correlation_id,
|
||||
endpoint_template,
|
||||
accept,
|
||||
id_teleservice=None,
|
||||
headers=None,
|
||||
timeout=None,
|
||||
**kwargs,
|
||||
):
|
||||
headers = {
|
||||
**(headers or {}),
|
||||
'Authorization': f'Bearer {access_token}',
|
||||
'X-Correlation-ID': correlation_id,
|
||||
'Accept': accept,
|
||||
}
|
||||
if id_teleservice:
|
||||
headers['ID_Teleservice'] = id_teleservice
|
||||
|
||||
endpoint = endpoint_template.format(**kwargs)
|
||||
if not base_url.endswith('/'):
|
||||
base_url += '/'
|
||||
url = urljoin(base_url, endpoint)
|
||||
|
||||
if timeout is not None:
|
||||
timeout = float(timeout)
|
||||
|
||||
# api-impot-particulier error reporting is byzantine, some errors are
|
||||
# accompanied by a 4xx code, some others with a 20x code, some have a
|
||||
# JSON content, other are only identified by a codeapp header on
|
||||
# the response
|
||||
try:
|
||||
response = session.get(url, headers=headers, timeout=timeout)
|
||||
response.raise_for_status()
|
||||
except requests.HTTPError:
|
||||
try:
|
||||
content = response.json()['erreur']
|
||||
except (ValueError, KeyError):
|
||||
try:
|
||||
raise APIError(
|
||||
'api-impot-particulier error', data={'codeapp': response.headers['codeapp']}
|
||||
)
|
||||
except KeyError:
|
||||
pass
|
||||
raise ServiceIsDown
|
||||
raise APIError('api-impot-particulier-error', data=content)
|
||||
except requests.RequestException:
|
||||
raise ServiceIsDown
|
||||
|
||||
if response.status_code != 200:
|
||||
try:
|
||||
content = response.json()['erreur']
|
||||
except (ValueError, KeyError):
|
||||
try:
|
||||
raise APIError(
|
||||
'api-impot-particulier-error', data={'codeapp': response.headers['codeapp']}
|
||||
)
|
||||
except KeyError:
|
||||
raise ServiceIsDown
|
||||
raise APIError('api-impot-particulier error', data=content)
|
||||
|
||||
try:
|
||||
response_data = response.json()
|
||||
except ValueError:
|
||||
raise ServiceIsDown
|
||||
return response_data
|
||||
|
||||
def _get_access_token(self, timeout=None):
|
||||
key = (
|
||||
'dgfip-at-'
|
||||
+ hashlib.sha256(
|
||||
f'{self.oauth_username}-{self.oauth_password}-{self.api_url}'.encode()
|
||||
).hexdigest()
|
||||
)
|
||||
|
||||
access_token = cache.get(key)
|
||||
if not access_token:
|
||||
access_token = self.get_access_token(
|
||||
session=self.requests,
|
||||
base_url=self.api_url,
|
||||
username=self.oauth_username,
|
||||
password=self.oauth_password,
|
||||
scope=self.oauth_scopes,
|
||||
timeout=timeout,
|
||||
)
|
||||
cache.set(key, access_token, 300)
|
||||
return access_token
|
||||
|
||||
@classmethod
|
||||
def get_access_token(cls, session, base_url, username, password, scope, timeout=None):
|
||||
data = {
|
||||
'grant_type': 'client_credentials',
|
||||
}
|
||||
if scope:
|
||||
data['scope'] = scope
|
||||
|
||||
url = urljoin(base_url, '/token')
|
||||
|
||||
if timeout is not None:
|
||||
timeout = float(timeout)
|
||||
|
||||
try:
|
||||
response = session.post(url, data=data, auth=(username, password), timeout=timeout)
|
||||
response.raise_for_status()
|
||||
except requests.RequestException:
|
||||
raise ServiceIsDown
|
||||
try:
|
||||
response_data = response.json()
|
||||
access_token = response_data['access_token']
|
||||
response_data = response.json()
|
||||
except (ValueError, KeyError, TypeError):
|
||||
raise ServiceIsDown
|
||||
return access_token
|
|
@ -17,9 +17,8 @@ KNOWN_ERRORS = {
|
|||
'Pas de droit sur la période demandée pour la prestation sélectionnée et le bénéficiaire choisi',
|
||||
'Pas de droit sur la période demandée pour la prestation sélectionnée.',
|
||||
"Votre quotient familial (Qf) sur cette période est non disponible. Pour plus d'information, contactez-nous.",
|
||||
# API particulier error messages not from the source above
|
||||
# API particulier error message not from the source above
|
||||
'Les paramètres fournis sont incorrects ou ne correspondent pas à un avis',
|
||||
"L'identifiant indiqué n'existe pas, n'est pas connu ou ne comporte aucune information pour cet appel.",
|
||||
},
|
||||
400: {
|
||||
'Absence de code confidentiel. Le document ne peut être édité.',
|
||||
|
@ -31,8 +30,6 @@ KNOWN_ERRORS = {
|
|||
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée',
|
||||
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée (après date du jour)',
|
||||
'L’opérateurs téléphonique» ne propose pas de raccordement SMS avec un prestataire externe (raccordement avec un numéro court). ',
|
||||
# API particulier error messages not from the source above
|
||||
"La référence de l'avis n'est pas correctement formatée",
|
||||
},
|
||||
500: {
|
||||
'Les informations souhaitées sont momentanément indisponibles. Merci de renouveler votre demande ultérieurement.',
|
||||
|
@ -42,7 +39,7 @@ KNOWN_ERRORS = {
|
|||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. Des paramètres manquent.",
|
||||
(
|
||||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. "
|
||||
'La taille du message ne doit pas être supérieure à 160 caractères.'
|
||||
"La taille du message ne doit pas être supérieure à 160 caractères."
|
||||
),
|
||||
(
|
||||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. "
|
||||
|
@ -53,7 +50,7 @@ KNOWN_ERRORS = {
|
|||
"Votre demande n'a pu aboutir en raison d'une erreur technique lié à l'appel au service IMC.",
|
||||
(
|
||||
"Votre demande n’a pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. "
|
||||
'Merci de renouveler votre demande ultérieurement.'
|
||||
"Merci de renouveler votre demande ultérieurement."
|
||||
),
|
||||
},
|
||||
}
|
||||
|
|
|
@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0002_auto_20151009_0326'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0002_auto_20181118_0807'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0003_auto_20190212_0426'),
|
||||
]
|
||||
|
|
|
@ -5,6 +5,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0004_auto_20190215_0807'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0005_auto_20210610_1508'),
|
||||
]
|
||||
|
|
|
@ -1,17 +0,0 @@
|
|||
# Generated by Django 3.2.18 on 2023-12-13 10:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('api_particulier', '0006_api_key_length_1024'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='apiparticulier',
|
||||
name='api_key',
|
||||
field=models.CharField(blank=True, default='', max_length=2048, verbose_name='API key'),
|
||||
),
|
||||
]
|
|
@ -63,7 +63,7 @@ class APIParticulier(BaseResource):
|
|||
choices=[(key, platform['label']) for key, platform in PLATFORMS.items()],
|
||||
)
|
||||
|
||||
api_key = models.CharField(max_length=2048, default='', blank=True, verbose_name=_('API key'))
|
||||
api_key = models.CharField(max_length=1024, default='', blank=True, verbose_name=_('API key'))
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
|
@ -170,6 +170,7 @@ class APIParticulier(BaseResource):
|
|||
self.save()
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
description=_('Get scopes available'),
|
||||
display_order=1,
|
||||
)
|
||||
|
@ -183,6 +184,7 @@ class APIParticulier(BaseResource):
|
|||
}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
show=False,
|
||||
description=_('Get citizen\'s fiscal informations'),
|
||||
parameters={
|
||||
|
@ -206,6 +208,7 @@ class APIParticulier(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='avis-imposition',
|
||||
perm='can_access',
|
||||
description=_('Get citizen\'s fiscal informations'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
|
@ -300,6 +303,7 @@ class APIParticulier(BaseResource):
|
|||
return data
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
show=False,
|
||||
description=_('Get family allowances recipient informations'),
|
||||
parameters={
|
||||
|
@ -323,6 +327,7 @@ class APIParticulier(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='situation-familiale',
|
||||
perm='can_access',
|
||||
description=_('Get family allowances recipient informations'),
|
||||
parameters={
|
||||
'code_postal': {
|
||||
|
@ -358,11 +363,6 @@ class APIParticulier(BaseResource):
|
|||
)
|
||||
data['data']['numero_allocataire'] = numero_allocataire
|
||||
data['data']['code_postal'] = code_postal
|
||||
for kind in 'allocataires', 'enfants':
|
||||
for person in data['data'].get(kind) or []:
|
||||
if len(person.get('dateDeNaissance') or '') == 8:
|
||||
birthdate = person['dateDeNaissance']
|
||||
person['dateDeNaissance_iso'] = birthdate[4:] + '-' + birthdate[2:4] + '-' + birthdate[:2]
|
||||
return data
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
|
|
@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0002_auto_20151009_0326'),
|
||||
]
|
||||
|
|
|
@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0002_auto_20170920_0951'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0003_auto_20181102_1550'),
|
||||
]
|
||||
|
|
|
@ -8,6 +8,7 @@ import passerelle.utils.templates
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0004_remove_arcgis_log_level'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0005_auto_20200310_1517'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0006_auto_20200401_1025'),
|
||||
]
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import string
|
||||
from urllib import parse as urlparse
|
||||
|
||||
|
@ -33,42 +32,6 @@ from passerelle.utils.conversion import num2deg
|
|||
from passerelle.utils.jsonresponse import APIError
|
||||
from passerelle.utils.templates import render_to_string, validate_template
|
||||
|
||||
EDIT_ITEM_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'Item schema',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'geometry': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'x': {'type': 'string'},
|
||||
'y': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
'attributes': {'type': 'object'},
|
||||
},
|
||||
'required': ['attributes'],
|
||||
}
|
||||
|
||||
EDIT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'Edit payload',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'adds': {
|
||||
'type': 'array',
|
||||
'description': 'Adds object',
|
||||
'items': EDIT_ITEM_SCHEMA,
|
||||
},
|
||||
'updates': {'type': 'array', 'description': 'Updates object', 'items': EDIT_ITEM_SCHEMA},
|
||||
'deletes': {'type': 'array', 'description': 'Deletes object', 'items': {'type': 'string'}},
|
||||
},
|
||||
'minProperties': 1,
|
||||
'unflatten': True,
|
||||
}
|
||||
|
||||
|
||||
class ArcGISError(APIError):
|
||||
pass
|
||||
|
@ -214,6 +177,7 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='mapservice-query',
|
||||
description=_('Map Service Query'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
|
@ -283,6 +247,7 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='featureservice-query',
|
||||
description=_('Feature Service Query'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
|
@ -353,49 +318,9 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
text_fieldname=text_fieldname,
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
name='featureservice-applyedits',
|
||||
description=_('Feature Service Apply Edits'),
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
'example_value': 'Specialty',
|
||||
},
|
||||
'service': {
|
||||
'description': _('Service name'),
|
||||
'example_value': 'ESRI_StateCityHighway_USA',
|
||||
},
|
||||
'layer': {
|
||||
'description': _('Layer or table name'),
|
||||
'example_value': '1',
|
||||
},
|
||||
},
|
||||
post={'request_body': {'schema': {'application/json': EDIT_SCHEMA}}},
|
||||
)
|
||||
def featureservice_applyedits(
|
||||
self,
|
||||
request,
|
||||
post_data,
|
||||
service,
|
||||
layer='0',
|
||||
folder='',
|
||||
):
|
||||
# implement "apply edits" feature service
|
||||
# https://developers.arcgis.com/rest/services-reference/enterprise/apply-edits-feature-service-layer-.htm
|
||||
uri = 'services/'
|
||||
if folder:
|
||||
uri += folder + '/'
|
||||
uri = uri + service + '/FeatureServer/' + layer + '/applyEdits'
|
||||
params = {'f': 'pjson'}
|
||||
for key, value in post_data.items():
|
||||
post_data[key] = json.dumps(value)
|
||||
params.update(post_data)
|
||||
return {'data': self.request(urlparse.urljoin(self.base_url, uri), data=params)}
|
||||
|
||||
@endpoint(
|
||||
name='tile',
|
||||
description=_('Tiles layer'),
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<layer>[\w/]+)/(?P<zoom>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+)\.png$',
|
||||
)
|
||||
def tile(self, request, layer, zoom, tile_x, tile_y):
|
||||
|
@ -424,6 +349,7 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
name='q',
|
||||
description=_('Query'),
|
||||
pattern=r'^(?P<query_slug>[\w:_-]+)/$',
|
||||
perm='can_access',
|
||||
show=False,
|
||||
)
|
||||
def q(self, request, query_slug, q=None, full=False, **kwargs):
|
||||
|
|
|
@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0006_resourcestatus'),
|
||||
]
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arpege_ecp', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -75,21 +75,13 @@ class ArpegeECP(BaseResource):
|
|||
@endpoint(
|
||||
name='api',
|
||||
pattern=r'^users/(?P<nameid>\w+)/forms$',
|
||||
example_pattern='users/{nameid}/forms',
|
||||
description=_('Returns user forms'),
|
||||
parameters={
|
||||
'nameid': {'description': _('Publik ID'), 'example_value': 'nameid'},
|
||||
'status': {'description': _('Demands status'), 'example_value': 'pending'},
|
||||
},
|
||||
perm='can_access',
|
||||
description='Returns user forms',
|
||||
)
|
||||
def get_user_forms(self, request, nameid, status='pending'):
|
||||
def get_user_forms(self, request, nameid):
|
||||
access_token = self.get_access_token(nameid)
|
||||
url = urlparse.urljoin(self.webservice_base_url, 'DemandesUsager')
|
||||
params = {'scope': 'data_administratives'}
|
||||
if status == 'pending':
|
||||
params['EtatDemande'] = 'DEPOSEE, ENCRSINSTR' # value for filtering pending forms
|
||||
elif status == 'done':
|
||||
params['EtatDemande'] = 'TRAITEEPOS, TRAITEENEG, TRAITEE' # value for filtering done forms
|
||||
auth = HawkAuth(self.hawk_auth_id, self.hawk_auth_key, ext=access_token)
|
||||
try:
|
||||
response = self.requests.get(url, params=params, auth=auth)
|
||||
|
@ -102,7 +94,7 @@ class ArpegeECP(BaseResource):
|
|||
except ValueError:
|
||||
raise APIError('No JSON content returned: %r' % response.content[:1000])
|
||||
if not result.get('Data'):
|
||||
raise APIError('%s (%s)' % (result.get('LibErreur'), result.get('CodErreur')))
|
||||
raise APIError("%s (%s)" % (result.get('LibErreur'), result.get('CodErreur')))
|
||||
for demand in result['Data']['results']:
|
||||
try:
|
||||
data_administratives = demand['data_administratives']
|
||||
|
|
|
@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue