Compare commits
446 Commits
wip/parsif
...
main
Author | SHA1 | Date |
---|---|---|
Emmanuel Cazenave | 733cc1104f | |
Emmanuel Cazenave | 022d3f2f37 | |
Emmanuel Cazenave | 9d8ed0ae1d | |
Frédéric Péters | 35201c953f | |
Corentin Sechet | 48eeaf01e4 | |
Corentin Sechet | a3862e80fe | |
Frédéric Péters | 09632084c0 | |
Frédéric Péters | feb6fd1428 | |
Frédéric Péters | db9740da15 | |
Emmanuel Cazenave | 38d62092ef | |
Emmanuel Cazenave | c25a371c10 | |
Benjamin Dauvergne | c10b2327dd | |
Corentin Sechet | 3538d06f6f | |
Nicolas Roche | abafd19553 | |
Nicolas Roche | a565716db2 | |
Nicolas Roche | d4bb8059e0 | |
Benjamin Dauvergne | 10590eb9d9 | |
Yann Weber | 23a59e3f0f | |
Yann Weber | 71d40f1230 | |
Yann Weber | d740651fcd | |
Yann Weber | 1fa2c0f9a7 | |
Corentin Sechet | 96c1e49e23 | |
Thomas NOËL | 3729c4605d | |
Yann Weber | 923125e786 | |
Valentin Deniaud | 4a243f72e5 | |
Yann Weber | b5ccfb60ff | |
Yann Weber | a36a0f9e9c | |
Yann Weber | fd7a2d487d | |
Yann Weber | 1e13344dc0 | |
Yann Weber | ed4a4b629e | |
Yann Weber | 82e4d424c7 | |
Serghei Mihai | 9ccf11fbd2 | |
Yann Weber | 764a1997f9 | |
Yann Weber | 04a9840744 | |
Yann Weber | 7b58db7ea7 | |
Yann Weber | 8986d35915 | |
Nicolas Roche | 5d26332646 | |
Yann Weber | 0f75026c9a | |
Yann Weber | f32d06b474 | |
Yann Weber | 08b82d398d | |
Yann Weber | 5e3a2d23d9 | |
Yann Weber | c729600cfc | |
Yann Weber | 665c16bca2 | |
Nicolas Roche | 5f86102711 | |
Corentin Sechet | 47f925d62c | |
Corentin Sechet | 6369875b12 | |
Corentin Sechet | 7330fce543 | |
Corentin Sechet | 6fc97253ff | |
Corentin Sechet | 631fd54f30 | |
Corentin Sechet | 8e7056f4de | |
Corentin Sechet | 99662694e6 | |
Frédéric Péters | f7f848af82 | |
Emmanuel Cazenave | b35b0fccd3 | |
Emmanuel Cazenave | 1570ba1f3f | |
Emmanuel Cazenave | ea1a5a87d6 | |
Corentin Sechet | 96f992e11d | |
Nicolas Roche | fd91664b1e | |
Corentin Sechet | 3f584e13a7 | |
Benjamin Dauvergne | 668ddf08e5 | |
Benjamin Dauvergne | b9cc850b7f | |
Benjamin Dauvergne | b2aa4c72f1 | |
Frédéric Péters | d416793d64 | |
Frédéric Péters | 51d2b4b314 | |
Corentin Sechet | c9001cdda4 | |
Corentin Sechet | 2f50e4b207 | |
Benjamin Dauvergne | 91b92aeb44 | |
Benjamin Dauvergne | 628b38fe5f | |
Benjamin Dauvergne | 8d6f202b16 | |
Benjamin Dauvergne | 68764cd9c2 | |
Benjamin Dauvergne | 33b39c52f1 | |
Corentin Sechet | 0fa387d6d0 | |
Corentin Sechet | 9ed59bc94d | |
Benjamin Dauvergne | 7c9e487482 | |
Corentin Sechet | 2842439ce1 | |
Corentin Sechet | 4738850fcc | |
Serghei Mihai | 38d3fbbf4e | |
Corentin Sechet | 36dfa9508e | |
Serghei Mihai | cd0f441d3b | |
Benjamin Dauvergne | 97e77b55c9 | |
Nicolas Roche | c3b5707ea6 | |
Benjamin Dauvergne | d05217cdc0 | |
Benjamin Dauvergne | 5b2f2d5b97 | |
Benjamin Dauvergne | 4465d64903 | |
Benjamin Dauvergne | 0b4de669fd | |
Benjamin Dauvergne | a15a11ec4a | |
Benjamin Dauvergne | 9a487dde91 | |
Nicolas Roche | 9a158a66d1 | |
Benjamin Dauvergne | a1d4c44ac4 | |
Benjamin Dauvergne | f2b64b6ebf | |
Benjamin Dauvergne | 45f6ee9e8d | |
Benjamin Dauvergne | 3c30a76f3e | |
Benjamin Dauvergne | 3ca8e98485 | |
Emmanuel Cazenave | 8f21df1dc7 | |
Emmanuel Cazenave | 4258520491 | |
Emmanuel Cazenave | 0f1117f483 | |
Nicolas Roche | eac67cb852 | |
Nicolas Roche | f841e98049 | |
Nicolas Roche | ad543177f1 | |
Nicolas Roche | 1a30653192 | |
Nicolas Roche | 33c756e76a | |
Benjamin Dauvergne | 94e60a35b2 | |
Benjamin Dauvergne | 3ad6c89068 | |
Benjamin Dauvergne | df0084d202 | |
Nicolas Roche | 13d30a8049 | |
Nicolas Roche | b5aa2bb2b2 | |
Nicolas Roche | 509ae33314 | |
Nicolas Roche | c3557f628a | |
Nicolas Roche | 93c6224357 | |
Nicolas Roche | e2e591afeb | |
Nicolas Roche | 94057f7a0f | |
Nicolas Roche | 86ba7d6a55 | |
Frédéric Péters | 129e2a5af3 | |
Frédéric Péters | 1c7f6c2557 | |
Nicolas Roche | 8459ef11a3 | |
Nicolas Roche | 8c9edcd332 | |
Lauréline Guérin | f69f6281ab | |
Nicolas Roche | 72b3315ae4 | |
Nicolas Roche | 08f347edf3 | |
Nicolas Roche | 28d2ea1ba9 | |
Frédéric Péters | 9c695acf63 | |
Benjamin Dauvergne | bbf7aabb30 | |
Benjamin Dauvergne | ee1baf8b50 | |
Benjamin Dauvergne | 37536df9d1 | |
Thomas NOËL | 514c3f1995 | |
Thomas NOËL | dd87030c50 | |
Nicolas Roche | 104f96b61c | |
Thomas NOËL | 6f3516297e | |
Thomas NOËL | 43e2d9222a | |
Thomas NOËL | d0ecf8af77 | |
Thomas NOËL | 31a0828d6d | |
Thomas NOËL | 020a402a96 | |
Nicolas Roche | 0010095146 | |
Corentin Sechet | b377b87d5d | |
Thomas NOËL | 52886c216c | |
Thomas NOËL | b8fc9716a4 | |
Thomas NOËL | 40c3c6affb | |
Nicolas Roche | ec8dd0a43c | |
Nicolas Roche | 5ad847df95 | |
Nicolas Roche | 8e6f61ceb7 | |
Nicolas Roche | 13fe6411eb | |
Thomas NOËL | 6101988404 | |
Serghei Mihai | 8b3b8edfda | |
Pierre Ducroquet | 52981183ff | |
Benjamin Dauvergne | afab9d49a1 | |
Nicolas Roche | e592c33021 | |
Nicolas Roche | ad4b9de490 | |
Benjamin Dauvergne | b4d637249a | |
Benjamin Dauvergne | f15d802b11 | |
Emmanuel Cazenave | 2e167a3466 | |
Frédéric Péters | f513f2451d | |
Frédéric Péters | b9939892b8 | |
Benjamin Dauvergne | 7de7cd8b3f | |
Benjamin Dauvergne | c247197c6e | |
Benjamin Dauvergne | bfd1fcc2f6 | |
Nicolas Roche | 905e3b141f | |
Nicolas Roche | e8122d29eb | |
Serghei Mihai | 4c5204bd2f | |
Corentin Sechet | 07619bc012 | |
Corentin Sechet | b516b7b66c | |
Corentin Sechet | 92768f5852 | |
Corentin Sechet | d69e4df328 | |
Corentin Sechet | 2711f5c615 | |
Benjamin Dauvergne | cd08a2068c | |
Benjamin Dauvergne | 66e99362ef | |
Benjamin Dauvergne | 00443f8629 | |
Corentin Sechet | 230d424571 | |
Corentin Sechet | a8e2223c50 | |
Corentin Sechet | 7951510aa1 | |
Thomas NOËL | 08fa0fad21 | |
Benjamin Dauvergne | 320013ac68 | |
Benjamin Dauvergne | 4338ee9cd7 | |
Corentin Sechet | 32d3dd01bc | |
Corentin Sechet | 7314fa224c | |
Corentin Sechet | 82e9018865 | |
Corentin Sechet | a51c49a865 | |
Corentin Sechet | 1e12dae71b | |
Thomas NOËL | e506facfd6 | |
Thomas NOËL | 0a28034137 | |
Nicolas Roche | 81f58cad59 | |
Emmanuel Cazenave | 2a73e4dfb3 | |
Nicolas Roche | 979e531b3a | |
Nicolas Roche | 5cd1e3aacc | |
Nicolas Roche | aa9585071a | |
Nicolas Roche | 923427783c | |
Nicolas Roche | 34ac701200 | |
Nicolas Roche | 1b0c842d48 | |
Nicolas Roche | d0f4b9ecf9 | |
Nicolas Roche | a7ff9bbc4a | |
Nicolas Roche | b7b50717ca | |
Nicolas Roche | 6c4fc4152d | |
Nicolas Roche | e59765eaf7 | |
Thomas NOËL | 8bb8f2c1df | |
Thomas NOËL | e2a45ea01b | |
Thomas NOËL | 11d3bd5a9b | |
Thomas NOËL | 2162e9d08d | |
Thomas NOËL | 264550e363 | |
Thomas NOËL | ba58f183ed | |
Thomas NOËL | f564e71d5d | |
Thomas NOËL | 6f7acc1489 | |
Serghei Mihai | 62c0b91ac4 | |
Emmanuel Cazenave | 2b0842eb03 | |
Emmanuel Cazenave | d315580294 | |
Serghei Mihai | 140863373f | |
Serghei Mihai | fa50ff9129 | |
Emmanuel Cazenave | 0154defcce | |
Emmanuel Cazenave | f336d7a952 | |
Thomas NOËL | c148f6ae03 | |
Nicolas Roche | bda1eba253 | |
Emmanuel Cazenave | 8892a97435 | |
Serghei Mihai | 14a6fb1aed | |
Benjamin Dauvergne | f63e250e0d | |
Serghei Mihai | 4789f1e1ff | |
Serghei Mihai | 2bbc835787 | |
Serghei Mihai | 94184d9c5e | |
Corentin Sechet | 76f3860ad2 | |
Corentin Sechet | 3d5ec0268c | |
Nicolas Roche | f2652bac36 | |
Corentin Sechet | c598673e3d | |
Corentin Sechet | fe1f40cc7d | |
Corentin Sechet | a3db9b1e35 | |
Thomas NOËL | 92f5b5f26b | |
Frédéric Péters | d6b87039cb | |
Corentin Sechet | 9d67f8587a | |
Benjamin Dauvergne | a9f2956db7 | |
Benjamin Dauvergne | 8266740b52 | |
Lauréline Guérin | 117743e0a6 | |
Nicolas Roche | 49226aca44 | |
Nicolas Roche | bc62bdc3fd | |
Nicolas Roche | 4bd7032998 | |
Nicolas Roche | e1b3ab7646 | |
Frédéric Péters | 7a671f7e74 | |
Nicolas Roche | 441ac49c58 | |
Nicolas Roche | bac28e933c | |
Benjamin Dauvergne | b497988bf5 | |
Emmanuel Cazenave | 898a14f821 | |
Emmanuel Cazenave | ef0b518aba | |
Emmanuel Cazenave | bf2610b4c5 | |
Paul Marillonnet | c56c0676de | |
Corentin Sechet | 649c1c05a8 | |
Benjamin Dauvergne | a192a953b9 | |
Benjamin Dauvergne | faf3e4692e | |
Nicolas Roche | 60bcc9d82e | |
Corentin Sechet | 0d6e180fef | |
Corentin Sechet | 02300e612e | |
Corentin Sechet | 520e6a818b | |
Corentin Sechet | 199075ed80 | |
Corentin Sechet | 5fe9b9a1e6 | |
Corentin Sechet | aed4c44107 | |
Corentin Sechet | 62e452c31e | |
Nicolas Roche | fd1c591ab3 | |
Nicolas Roche | 40287181cc | |
Serghei Mihai | 4ccaad6d35 | |
Nicolas Roche | 96b0777324 | |
Serghei Mihai | 9e64fa5c9b | |
Serghei Mihai | 2916fb7c32 | |
Emmanuel Cazenave | e549db488f | |
Emmanuel Cazenave | 16fc487119 | |
Corentin Sechet | 9d78d8fcf3 | |
Corentin Sechet | 8cdf3dcae2 | |
Nicolas Roche | 659ba18a00 | |
Nicolas Roche | 7cbd27afd3 | |
Nicolas Roche | 1a37984298 | |
Nicolas Roche | a4805681a1 | |
Nicolas Roche | eacfb506d6 | |
Frédéric Péters | 306cba2423 | |
Valentin Deniaud | bcfc02d94a | |
Valentin Deniaud | 40142de8d2 | |
Valentin Deniaud | 6e7ac8c145 | |
Lauréline Guérin | 0a46addb73 | |
Frédéric Péters | a19073c7e9 | |
Frédéric Péters | 7178f7c4d0 | |
Nicolas Roche | fac61176a3 | |
Lauréline Guérin | 4f136ee898 | |
Nicolas Roche | 06f22a03f8 | |
Frédéric Péters | ab2f8a847b | |
Thomas NOËL | c38ee2913c | |
Corentin Sechet | 100064eba8 | |
Corentin Sechet | 5157fde445 | |
Serghei Mihai | 8fa7d79b1e | |
Serghei Mihai | c83228e375 | |
Nicolas Roche | 3cee8e4350 | |
Lauréline Guérin | 2775202bd8 | |
Nicolas Roche | 64b25c7c73 | |
Nicolas Roche | a8bab7fa01 | |
Nicolas Roche | 16fd6aae41 | |
Nicolas Roche | 56e2a4b1d9 | |
Nicolas Roche | 122a0f6c22 | |
Nicolas Roche | 569159a95f | |
Nicolas Roche | 2b0612d5ef | |
Nicolas Roche | 25420ca260 | |
Nicolas Roche | bd388e42a6 | |
Valentin Deniaud | 4ce087b6ef | |
Valentin Deniaud | 75db45cdfa | |
Valentin Deniaud | 5cda735517 | |
Serghei Mihai | ccb53be16e | |
Serghei Mihai | 9626f03f34 | |
Emmanuel Cazenave | de3d69e2d2 | |
Emmanuel Cazenave | 84f1b2e728 | |
Emmanuel Cazenave | d199eb9de7 | |
Nicolas Roche | 9386398863 | |
Lauréline Guérin | f928a10fc5 | |
Nicolas Roche | 0616f216bf | |
Benjamin Dauvergne | 5d05b38653 | |
Thomas NOËL | 1afe1a8649 | |
Emmanuel Cazenave | 34e0b6f8d8 | |
Emmanuel Cazenave | a16dc0c83a | |
Lauréline Guérin | de81517bb4 | |
Emmanuel Cazenave | 5ce8d34fa5 | |
Nicolas Roche | 0b8730b9ba | |
Emmanuel Cazenave | 60d2277b55 | |
Emmanuel Cazenave | faebc78066 | |
Emmanuel Cazenave | 7212c9056d | |
Nicolas Roche | 0e07b8fca7 | |
Frédéric Péters | f498f8f32a | |
Frédéric Péters | d59eaa8ab3 | |
Frédéric Péters | 85f6e24aab | |
Nicolas Roche | 9dee19e493 | |
Nicolas Roche | 6238b21727 | |
Nicolas Roche | 451cf508ce | |
Nicolas Roche | 5a1046c7d2 | |
Nicolas Roche | 5e94eb86a3 | |
Nicolas Roche | ad752230d6 | |
Nicolas Roche | 16ab7c0a77 | |
Nicolas Roche | 4d58d2c80b | |
Nicolas Roche | 6147e497bc | |
Nicolas Roche | c22981630f | |
Nicolas Roche | 16c3bbd120 | |
Nicolas Roche | 37fcc2d65a | |
Nicolas Roche | 1257eea8d2 | |
Nicolas Roche | f5dc0f4fb2 | |
Nicolas Roche | 3946028e53 | |
Nicolas Roche | 998e1c1208 | |
Nicolas Roche | aa99a0d826 | |
Nicolas Roche | e2047aa318 | |
Nicolas Roche | 6546c7ac63 | |
Nicolas Roche | 2083b70610 | |
Nicolas Roche | a15c80765b | |
Nicolas Roche | bcbe12679e | |
Nicolas Roche | ae5681b0e7 | |
Nicolas Roche | 03cbdf578f | |
Nicolas Roche | e873bfaaa8 | |
Nicolas Roche | 59989d562b | |
Nicolas Roche | aa250342da | |
Nicolas Roche | 59955c60db | |
Nicolas Roche | 72b3cc8a87 | |
Nicolas Roche | ea1c2b34bb | |
Lauréline Guérin | 95ba4d5f0e | |
Lauréline Guérin | fb01b9a9ec | |
Lauréline Guérin | 468e5309a9 | |
Thomas NOËL | 002af7c243 | |
Thomas NOËL | f7739d1aa2 | |
Nicolas Roche | 4e5ec54b26 | |
Nicolas Roche | 61308407de | |
Nicolas Roche | ec1c4886fd | |
Frédéric Péters | 2618463abb | |
Nicolas Roche | c8fb63fe3e | |
Thomas NOËL | 7a21a3e50c | |
Emmanuel Cazenave | d479819f50 | |
Emmanuel Cazenave | 415e9f8a9e | |
Serghei Mihai | 166d58591a | |
Nicolas Roche | 2cac256517 | |
Benjamin Dauvergne | 62ed945d62 | |
Benjamin Dauvergne | dcb772fdbd | |
Benjamin Dauvergne | 7959ec9a3c | |
Frédéric Péters | 65d3f390f3 | |
Emmanuel Cazenave | 65409f2070 | |
Nicolas Roche | 3f69bdb447 | |
Nicolas Roche | 9d0fc45957 | |
Nicolas Roche | 29ce646989 | |
Frédéric Péters | 9a892a0e77 | |
Frédéric Péters | e4a9d16719 | |
Corentin Sechet | 29b8775a16 | |
Nicolas Roche | d176d9fc4b | |
Nicolas Roche | 8df0c9ec11 | |
Nicolas Roche | d21669a250 | |
Emmanuel Cazenave | b74e848dbd | |
Emmanuel Cazenave | 0c06086585 | |
Emmanuel Cazenave | 6b74e9a632 | |
Emmanuel Cazenave | 7102c3150a | |
Frédéric Péters | 9ff69633a3 | |
Frédéric Péters | 6194728fb3 | |
Frédéric Péters | 1ab81c200b | |
Thomas NOËL | 2277fcdd23 | |
Corentin Sechet | 54dbbc3148 | |
Frédéric Péters | 6b432122d3 | |
Thomas NOËL | 3ba866a275 | |
Thomas Jund | 3934030677 | |
Corentin Sechet | e2ce17f701 | |
Corentin Sechet | 7395fa5560 | |
Nicolas Roche | 816da0f6b6 | |
Nicolas Roche | be4a65b6be | |
Nicolas Roche | 347d09db89 | |
Nicolas Roche | 0276de78c2 | |
Nicolas Roche | 80fc536e2b | |
Nicolas Roche | 3dab63a0f3 | |
Nicolas Roche | bd9270a8ad | |
Emmanuel Cazenave | 31efc19163 | |
Lauréline Guérin | 9a4f57612e | |
Lauréline Guérin | e0ed5cc1c9 | |
Emmanuel Cazenave | ab46f17856 | |
Nicolas Roche | 0b81087341 | |
Nicolas Roche | f71891abd7 | |
Nicolas Roche | 731148917e | |
Nicolas Roche | c413b5738f | |
Emmanuel Cazenave | fd09fb2fd7 | |
Benjamin Dauvergne | 84cd51957e | |
Nicolas Roche | 8e215185ec | |
Nicolas Roche | 4cdfeb47b0 | |
Nicolas Roche | 896391c718 | |
Nicolas Roche | 86ac566bbb | |
Nicolas Roche | 135cdbf46a | |
Thomas NOËL | d87dd7b107 | |
Frédéric Péters | 23480ce819 | |
Frédéric Péters | 1bc79d7312 | |
Nicolas Roche | bdd68dc6e8 | |
Serghei Mihai | f3a7f7f460 | |
Serghei Mihai | f704763565 | |
Emmanuel Cazenave | 822a0d83b4 | |
Benjamin Dauvergne | 0a6733f070 | |
Benjamin Dauvergne | 4d89c476bb | |
Benjamin Dauvergne | 96413bd5d9 | |
Benjamin Dauvergne | 7defa59ccc | |
Benjamin Dauvergne | 9f5927daa5 | |
Benjamin Dauvergne | a20835e118 | |
Nicolas Roche | fc9444cd98 | |
Nicolas Roche | 6f562d6e10 | |
Nicolas Roche | acd0ba843c | |
Nicolas Roche | 5662aa069d | |
Nicolas Roche | 06b640731f | |
Nicolas Roche | 5243e328a4 | |
Nicolas Roche | bbc8e1cb5b | |
Nicolas Roche | 1be01a198e | |
Nicolas Roche | c7d33287c5 | |
Nicolas Roche | 483268c636 | |
Nicolas Roche | be6d3df42e | |
Nicolas Roche | db11bfcbb7 | |
Nicolas Roche | 1d21ac784c | |
Nicolas Roche | 87f982e8a4 | |
Nicolas Roche | 18825c057e | |
Nicolas Roche | 36940933d9 | |
Nicolas Roche | 9535c6c68f | |
Nicolas Roche | 2d208a9b96 | |
Nicolas Roche | d4c0214ac7 | |
Nicolas Roche | 87b97a417b | |
Frédéric Péters | 8e095b9dfa | |
Thomas NOËL | a75835584b |
|
@ -8,3 +8,5 @@ d2c0be039649febded68d9d04f745cd18b2b2e03
|
|||
989fb5271967e8e87fd57837dd6d8cfe932e7ebe
|
||||
# misc: apply djhtml (#69422)
|
||||
6da81964bd91b5656364357ec06776fed3529c8a
|
||||
# misc: apply double-quote-string-fixer (#79788)
|
||||
40142de8d2d9885f7a57f4b0f5ab1a593e13aaca
|
||||
|
|
|
@ -12,5 +12,7 @@ passerelle.egg-info/
|
|||
coverage.xml
|
||||
junit-py*.xml
|
||||
.sass-cache/
|
||||
passerelle/static/css/style.css
|
||||
passerelle/static/css/style.css.map
|
||||
passerelle/**/static/**/css/style.css
|
||||
passerelle/**/static/**/css/style.css.map
|
||||
node_modules/
|
||||
coverage/
|
||||
|
|
|
@ -1,6 +1,10 @@
|
|||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
hooks:
|
||||
- id: double-quote-string-fixer
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.3.1
|
||||
hooks:
|
||||
|
@ -27,6 +31,6 @@ repos:
|
|||
- id: djhtml
|
||||
args: ['--tabwidth', '2']
|
||||
- repo: https://git.entrouvert.org/pre-commit-debian.git
|
||||
rev: v0.1
|
||||
rev: v0.3
|
||||
hooks:
|
||||
- id: pre-commit-debian
|
||||
|
|
|
@ -11,19 +11,34 @@ pipeline {
|
|||
RAND_TEST = "${Math.abs(new Random().nextInt(max+1))}"
|
||||
}
|
||||
stages {
|
||||
stage('Unit Tests') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=6 RAND_TEST=${env.RAND_TEST} tox -rv"
|
||||
}
|
||||
post {
|
||||
always {
|
||||
script {
|
||||
utils = new Utils()
|
||||
utils.publish_coverage('coverage.xml')
|
||||
utils.publish_coverage_native('index.html')
|
||||
utils.publish_pylint('pylint.out')
|
||||
stage('Tests (in parallel)') {
|
||||
failFast true
|
||||
parallel {
|
||||
stage('Unit Tests (pytest)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv"
|
||||
}
|
||||
post {
|
||||
always {
|
||||
script {
|
||||
utils = new Utils()
|
||||
utils.publish_coverage('coverage.xml')
|
||||
utils.publish_coverage_native('index.html')
|
||||
utils.publish_pylint('pylint.out')
|
||||
}
|
||||
mergeJunitResults()
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Unit Tests (vitest)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv -e vitest"
|
||||
}
|
||||
}
|
||||
stage('Linter (pylint)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv -e pylint"
|
||||
}
|
||||
mergeJunitResults()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -39,9 +54,9 @@ pipeline {
|
|||
'''
|
||||
).trim()
|
||||
if (env.GIT_BRANCH == 'main' || env.GIT_BRANCH == 'origin/main') {
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye ${SHORT_JOB_NAME}"
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye,bookworm ${SHORT_JOB_NAME}"
|
||||
} else if (env.GIT_BRANCH.startsWith('hotfix/')) {
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye --branch ${env.GIT_BRANCH} --hotfix ${SHORT_JOB_NAME}"
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye,bookworm --branch ${env.GIT_BRANCH} --hotfix ${SHORT_JOB_NAME}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
15
README
15
README
|
@ -126,3 +126,18 @@ django-jsonresponse (https://github.com/jjay/django-jsonresponse)
|
|||
# Files: passerelle/utils/jsonresponse.py
|
||||
# Copyright (c) 2012 Yasha Borevich <j.borevich@gmail.com>
|
||||
# Licensed under the BSD license
|
||||
|
||||
tweetnacl-js (https://github.com/dchest/tweetnacl-js)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/js/nacl.min.js
|
||||
# Copyright: https://github.com/dchest/tweetnacl-js/blob/master/AUTHORS.md
|
||||
# Licensed under the Unlicense license (public domain)
|
||||
|
||||
zxing-browser (https://github.com/zxing-js/browser/)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/js/zxing-browser.min.js
|
||||
# Copyright: (c) 2018 ZXing for JS
|
||||
# Licensed under the MIT license.
|
||||
|
||||
RemixIcon (https://github.com/Remix-Design/RemixIcon)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/img/favicon.ico
|
||||
# Copyright (c) 2020 RemixIcon.com
|
||||
# Licensed under the Apache License Version 2.0
|
||||
|
|
|
@ -16,7 +16,9 @@ Architecture: all
|
|||
Depends: ghostscript,
|
||||
pdftk,
|
||||
poppler-utils,
|
||||
python3-caldav,
|
||||
python3-cmislib,
|
||||
python3-cryptography,
|
||||
python3-dateutil,
|
||||
python3-distutils,
|
||||
python3-django (>= 2:3.2),
|
||||
|
@ -43,6 +45,7 @@ Depends: ghostscript,
|
|||
python3-uwsgidecorators,
|
||||
python3-vobject,
|
||||
python3-xmlschema,
|
||||
python3-xmltodict,
|
||||
python3-zeep (>= 3.2),
|
||||
${misc:Depends},
|
||||
${python3:Depends},
|
||||
|
@ -60,8 +63,9 @@ Depends: adduser,
|
|||
uwsgi,
|
||||
uwsgi-plugin-python3,
|
||||
${misc:Depends},
|
||||
Recommends: memcached, nginx
|
||||
Suggests: postgresql
|
||||
Breaks: python-passerelle (<<5.75.post9)
|
||||
Replaces: python-passerelle (<<5.75.post9)
|
||||
Recommends: memcached,
|
||||
nginx,
|
||||
Suggests: postgresql,
|
||||
Breaks: python-passerelle (<<5.75.post9),
|
||||
Replaces: python-passerelle (<<5.75.post9),
|
||||
Description: Uniform access to multiple data sources and services
|
||||
|
|
|
@ -4,6 +4,7 @@ After=network.target postgresql.service
|
|||
Wants=postgresql.service
|
||||
|
||||
[Service]
|
||||
SyslogIdentifier=uwsgi/%p
|
||||
Environment=PASSERELLE_SETTINGS_FILE=/usr/lib/%p/debian_config.py
|
||||
Environment=PASSERELLE_WSGI_TIMEOUT=120
|
||||
Environment=PASSERELLE_WSGI_WORKERS=5
|
||||
|
|
|
@ -18,6 +18,7 @@ spooler-python-import = passerelle.utils.spooler
|
|||
spooler-max-tasks = 20
|
||||
|
||||
# every five minutes
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants every5min
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants availability
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants jobs
|
||||
# hourly
|
||||
|
|
|
@ -2,23 +2,23 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--url", help="Url of a passerelle Caluire Axel connector instance")
|
||||
parser.addoption("--nameid", help="Publik Name ID")
|
||||
parser.addoption("--firstname", help="first name of a user")
|
||||
parser.addoption("--lastname", help="Last name of a user")
|
||||
parser.addoption("--family", help="Family ID")
|
||||
parser.addoption('--url', help='Url of a passerelle Caluire Axel connector instance')
|
||||
parser.addoption('--nameid', help='Publik Name ID')
|
||||
parser.addoption('--firstname', help='first name of a user')
|
||||
parser.addoption('--lastname', help='Last name of a user')
|
||||
parser.addoption('--family', help='Family ID')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption("--url")
|
||||
return request.config.getoption('--url')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def user(request):
|
||||
return {
|
||||
'name_id': request.config.getoption("--nameid"),
|
||||
'first_name': request.config.getoption("--firstname"),
|
||||
'last_name': request.config.getoption("--lastname"),
|
||||
'family': request.config.getoption("--family"),
|
||||
'name_id': request.config.getoption('--nameid'),
|
||||
'first_name': request.config.getoption('--firstname'),
|
||||
'last_name': request.config.getoption('--lastname'),
|
||||
'family': request.config.getoption('--family'),
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ def test_link(conn, user):
|
|||
'NOM': user['last_name'],
|
||||
'PRENOM': user['first_name'],
|
||||
}
|
||||
print("Creating link with the following payload:")
|
||||
print('Creating link with the following payload:')
|
||||
pprint.pprint(payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -21,7 +21,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("GET family info")
|
||||
print('GET family info')
|
||||
url = conn + '/family_info?NameID=%s' % name_id
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -30,7 +30,7 @@ def test_link(conn, user):
|
|||
assert data['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("GET children info")
|
||||
print('GET children info')
|
||||
url = conn + '/children_info?NameID=%s' % (name_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -40,7 +40,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
|
||||
for child in data['data']['MEMBRE']:
|
||||
print("GET child info")
|
||||
print('GET child info')
|
||||
url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDENT'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -49,7 +49,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("and GET school info")
|
||||
print('and GET school info')
|
||||
url = conn + '/child_schooling_info?NameID=%s&idpersonne=%s&schooling_date=%s' % (
|
||||
name_id,
|
||||
child['IDENT'],
|
||||
|
@ -62,7 +62,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("and GET activities info")
|
||||
print('and GET activities info')
|
||||
url = conn + '/child_activities_info?NameID=%s&idpersonne=%s&schooling_date=%s' % (
|
||||
name_id,
|
||||
child['IDENT'],
|
||||
|
@ -75,7 +75,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("GET school list")
|
||||
print('GET school list')
|
||||
url = conn + '/school_list'
|
||||
payload = {
|
||||
'num': data['data']['RESPONSABLE1']['ADRESSE']['NORUE'],
|
||||
|
@ -92,7 +92,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
return
|
||||
|
||||
print("Deleting link")
|
||||
print('Deleting link')
|
||||
url = conn + '/unlink?NameID=%s' % name_id
|
||||
resp = requests.post(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -5,25 +5,25 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--cmis-connector-url", help="Url of a passerelle CMIS connector instance")
|
||||
parser.addoption("--cmis-endpoint", help="Url of a passerelle CMIS endpoint")
|
||||
parser.addoption("--cmis-username", help="Username for the CMIS endpoint")
|
||||
parser.addoption("--cmis-password", help="Password for the CMIS endpoint")
|
||||
parser.addoption("--preserve-tree", action="store_true", default=False, help="Preserve test directory")
|
||||
parser.addoption('--cmis-connector-url', help='Url of a passerelle CMIS connector instance')
|
||||
parser.addoption('--cmis-endpoint', help='Url of a passerelle CMIS endpoint')
|
||||
parser.addoption('--cmis-username', help='Username for the CMIS endpoint')
|
||||
parser.addoption('--cmis-password', help='Password for the CMIS endpoint')
|
||||
parser.addoption('--preserve-tree', action='store_true', default=False, help='Preserve test directory')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def cmisclient(request):
|
||||
return cmislib.CmisClient(
|
||||
request.config.getoption("--cmis-endpoint"),
|
||||
request.config.getoption("--cmis-username"),
|
||||
request.config.getoption("--cmis-password"),
|
||||
request.config.getoption('--cmis-endpoint'),
|
||||
request.config.getoption('--cmis-username'),
|
||||
request.config.getoption('--cmis-password'),
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def cmis_connector(request):
|
||||
return request.config.getoption("--cmis-connector-url")
|
||||
return request.config.getoption('--cmis-connector-url')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
|
@ -31,6 +31,6 @@ def cmis_tmpdir(cmisclient, request):
|
|||
path = 'test-%s' % random.randint(0, 10000)
|
||||
folder = cmisclient.defaultRepository.rootFolder.createFolder(path)
|
||||
yield folder.properties['cmis:path']
|
||||
preserve_tree = request.config.getoption("--preserve-tree")
|
||||
preserve_tree = request.config.getoption('--preserve-tree')
|
||||
if not preserve_tree:
|
||||
folder.deleteTree()
|
||||
|
|
|
@ -10,7 +10,7 @@ SPECIAL_CHARS = '!#$%&+-^_`;[]{}+='
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"path,file_name",
|
||||
'path,file_name',
|
||||
[
|
||||
('', 'some.file'),
|
||||
('/toto', 'some.file'),
|
||||
|
@ -31,8 +31,8 @@ def test_uploadfile(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, monkeypatch
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
"path": cmis_tmpdir + path,
|
||||
"file": {"content": file_b64_content, "filename": file_name, "content_type": "image/jpeg"},
|
||||
'path': cmis_tmpdir + path,
|
||||
'file': {'content': file_b64_content, 'filename': file_name, 'content_type': 'image/jpeg'},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -59,8 +59,8 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
"path": cmis_tmpdir + '/uploadconflict',
|
||||
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
|
||||
'path': cmis_tmpdir + '/uploadconflict',
|
||||
'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -70,11 +70,11 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
"path": cmis_tmpdir + '/uploadconflict',
|
||||
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
|
||||
'path': cmis_tmpdir + '/uploadconflict',
|
||||
'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
resp_data = response.json()
|
||||
assert resp_data['err'] == 1
|
||||
assert resp_data['err_desc'].startswith("update conflict")
|
||||
assert resp_data['err_desc'].startswith('update conflict')
|
||||
|
|
|
@ -2,9 +2,9 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--url", help="Url of a passerelle Planitech connector instance")
|
||||
parser.addoption('--url', help='Url of a passerelle Planitech connector instance')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption("--url")
|
||||
return request.config.getoption('--url')
|
||||
|
|
|
@ -113,7 +113,7 @@ def test_main(conn):
|
|||
|
||||
|
||||
def call_generic(conn, endpoint):
|
||||
print("%s \n" % endpoint)
|
||||
print('%s \n' % endpoint)
|
||||
url = conn + '/%s' % endpoint
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -2,25 +2,25 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--url", help="Url of a passerelle Toulouse Axel connector instance")
|
||||
parser.addoption("--nameid", help="Publik Name ID")
|
||||
parser.addoption("--firstname", help="first name of a user")
|
||||
parser.addoption("--lastname", help="Last name of a user")
|
||||
parser.addoption("--dob", help="Date of birth of a user")
|
||||
parser.addoption("--dui", help="DUI number")
|
||||
parser.addoption('--url', help='Url of a passerelle Toulouse Axel connector instance')
|
||||
parser.addoption('--nameid', help='Publik Name ID')
|
||||
parser.addoption('--firstname', help='first name of a user')
|
||||
parser.addoption('--lastname', help='Last name of a user')
|
||||
parser.addoption('--dob', help='Date of birth of a user')
|
||||
parser.addoption('--dui', help='DUI number')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption("--url")
|
||||
return request.config.getoption('--url')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def user(request):
|
||||
return {
|
||||
'name_id': request.config.getoption("--nameid"),
|
||||
'first_name': request.config.getoption("--firstname"),
|
||||
'last_name': request.config.getoption("--lastname"),
|
||||
'dob': request.config.getoption("--dob"),
|
||||
'dui': request.config.getoption("--dui"),
|
||||
'name_id': request.config.getoption('--nameid'),
|
||||
'first_name': request.config.getoption('--firstname'),
|
||||
'last_name': request.config.getoption('--lastname'),
|
||||
'dob': request.config.getoption('--dob'),
|
||||
'dui': request.config.getoption('--dui'),
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@ import requests
|
|||
|
||||
|
||||
def test_link(conn, user):
|
||||
print("Get update management dates")
|
||||
print('Get update management dates')
|
||||
url = conn + '/management_dates'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -21,7 +21,7 @@ def test_link(conn, user):
|
|||
'PRENOM': user['first_name'],
|
||||
'NAISSANCE': user['dob'],
|
||||
}
|
||||
print("Creating link with the following payload:")
|
||||
print('Creating link with the following payload:')
|
||||
pprint.pprint(payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -30,7 +30,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print("GET family info")
|
||||
print('GET family info')
|
||||
url = conn + '/family_info?NameID=%s' % name_id
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -158,7 +158,7 @@ def test_link(conn, user):
|
|||
for key in flags:
|
||||
payload[key] = True
|
||||
|
||||
print("Update family info with the following payload:")
|
||||
print('Update family info with the following payload:')
|
||||
pprint.pprint(payload)
|
||||
url = conn + '/update_family_info?NameID=%s' % name_id
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -168,7 +168,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print("GET children info")
|
||||
print('GET children info')
|
||||
url = conn + '/children_info?NameID=%s' % (name_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -178,7 +178,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
|
||||
for child in data['data']['ENFANT']:
|
||||
print("GET child info")
|
||||
print('GET child info')
|
||||
url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -187,7 +187,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print("GET child contact info")
|
||||
print('GET child contact info')
|
||||
url = conn + '/child_contacts_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -196,7 +196,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print("Deleting link")
|
||||
print('Deleting link')
|
||||
url = conn + '/unlink?NameID=%s' % name_id
|
||||
resp = requests.post(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -21,7 +21,7 @@ FAMILY_PAYLOAD = {
|
|||
'rl1': {
|
||||
'civility': 'MME',
|
||||
'firstname': 'Marge',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'maidenName': 'Bouvier',
|
||||
'quality': 'MERE',
|
||||
'birth': {
|
||||
|
@ -32,14 +32,14 @@ FAMILY_PAYLOAD = {
|
|||
'idStreet': '2317',
|
||||
'num': '4',
|
||||
'street1': 'requeried having idStreet provided',
|
||||
'town': 'Springfield',
|
||||
'zipcode': '62701',
|
||||
'town': 'Toulouse',
|
||||
'zipcode': '31400',
|
||||
},
|
||||
},
|
||||
'rl2': {
|
||||
'civility': 'MR',
|
||||
'firstname': 'Homer',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'quality': 'PERE',
|
||||
'birth': {
|
||||
'dateBirth': '1956-05-12',
|
||||
|
@ -96,7 +96,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'M',
|
||||
'firstname': 'Bart',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {
|
||||
'dateBirth': '2014-04-01',
|
||||
'place': 'Brive-la-Gaillarde',
|
||||
|
@ -133,11 +133,11 @@ FAMILY_PAYLOAD = {
|
|||
'hospital': 'Springfield General Hospital',
|
||||
'vaccinList': [
|
||||
{
|
||||
'code': '45',
|
||||
'code': '8',
|
||||
'vaccinationDate': '2011-01-11',
|
||||
},
|
||||
{
|
||||
'code': '24',
|
||||
'code': '1',
|
||||
'vaccinationDate': '2022-02-22',
|
||||
},
|
||||
],
|
||||
|
@ -158,7 +158,7 @@ FAMILY_PAYLOAD = {
|
|||
'personInfo': {
|
||||
'civility': 'MR',
|
||||
'firstname': 'Abraham Jebediah',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'dateBirth': '1927-05-24',
|
||||
'sexe': 'M',
|
||||
'contact': {
|
||||
|
@ -175,7 +175,7 @@ FAMILY_PAYLOAD = {
|
|||
'personInfo': {
|
||||
'civility': 'MME',
|
||||
'firstname': 'Mona Penelope',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'dateBirth': '1929-03-15',
|
||||
'sexe': 'F',
|
||||
'contact': {
|
||||
|
@ -193,7 +193,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'F',
|
||||
'firstname': 'Lisa',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {'dateBirth': '2016-05-09'},
|
||||
'dietcode': 'MENU_SV',
|
||||
'paiInfoBean': {
|
||||
|
@ -203,7 +203,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'F',
|
||||
'firstname': 'Maggie',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {'dateBirth': '2018-12-17'},
|
||||
'dietcode': 'MENU_PAI',
|
||||
'paiInfoBean': {
|
||||
|
@ -213,7 +213,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'M',
|
||||
'firstname': 'Hugo',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {'dateBirth': '2018-04-01'},
|
||||
'dietcode': 'MENU_AV',
|
||||
'paiInfoBean': {
|
||||
|
@ -261,7 +261,10 @@ def pytest_addoption(parser):
|
|||
parser.addoption('--nameid', help='Publik Name ID', default='functest')
|
||||
parser.addoption('--dui', help='DUI number', default='')
|
||||
parser.addoption(
|
||||
'--lastname', help='override lastname to create a new "update" family', default='Simpson'
|
||||
'--lastname', help='override lastname to create a new "update" family', default='Test_Simpson'
|
||||
)
|
||||
parser.addoption(
|
||||
'--quick', action='store_true', help='do not reload referentials to speed-up tests', default=False
|
||||
)
|
||||
|
||||
|
||||
|
@ -348,6 +351,7 @@ def remove_id_on_rlg(conn, rlg):
|
|||
rlg['indicatorList'].sort(key=lambda x: x['code'])
|
||||
rlg['quotientList'].sort(key=lambda x: (x['yearRev'], x['dateStart']))
|
||||
del rlg['indicators'] # order may change
|
||||
del rlg['quotients'] # order may change
|
||||
rlg['subscribeActivityList'] = [] # not managed by test yet
|
||||
del rlg['subscribe_natures'] # order may change
|
||||
|
||||
|
@ -402,7 +406,10 @@ def conn(request):
|
|||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def referentials(conn):
|
||||
def referentials(request, conn):
|
||||
quick = request.config.getoption('--quick')
|
||||
if quick:
|
||||
return
|
||||
url = urlparse.urlparse(conn)
|
||||
slug = url.path.split('/')[2]
|
||||
cmd = (
|
||||
|
@ -415,10 +422,10 @@ def referentials(conn):
|
|||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def create_data(request, conn):
|
||||
def create_data(request, conn, reference_year):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
unlink(conn, name_id)
|
||||
lastname = 'EO_' + uuid4().hex[0:27]
|
||||
lastname = 'TEST_' + uuid4().hex[0:25]
|
||||
|
||||
# create family
|
||||
create_family_payload = copy.deepcopy(FAMILY_PAYLOAD)
|
||||
|
@ -442,6 +449,21 @@ def create_data(request, conn):
|
|||
resp.raise_for_status()
|
||||
create_result = resp.json()
|
||||
assert create_result['err'] == 0
|
||||
|
||||
# add requiered quotient for subscriptions
|
||||
data = read_family(conn, name_id)
|
||||
url = conn + '/update-quotient?NameID=%s&rl_id=%s' % (name_id, data['RL1']['num'])
|
||||
payload = {
|
||||
'yearRev': str(reference_year),
|
||||
'dateStart': '%s-09-01' % (reference_year),
|
||||
'dateEnd': '3000-08-31',
|
||||
'mtt': '5000.0',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
print('\ncreate DUI: %s' % str(create_result['data']['number']))
|
||||
data = diff_family(conn, name_id, 'test_create_family.json')
|
||||
|
||||
|
@ -458,6 +480,58 @@ def create_data(request, conn):
|
|||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def create_data2(request, conn, reference_year):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
unlink(conn, name_id)
|
||||
lastname = 'TEST_' + uuid4().hex[0:25]
|
||||
|
||||
# create family that is not located into Toulouse
|
||||
create_family_payload = copy.deepcopy(FAMILY_PAYLOAD)
|
||||
create_family_payload['rl1']['lastname'] = lastname
|
||||
create_family_payload['rl1']['adresse'] = create_family_payload['rl2']['adresse']
|
||||
create_family_payload['rl2']['adresse'] = copy.deepcopy(FAMILY_PAYLOAD['rl1']['adresse'])
|
||||
for child in create_family_payload['childList']:
|
||||
child['lastname'] = lastname
|
||||
|
||||
url = conn + '/create-family?NameID=%s' % name_id
|
||||
resp = requests.post(url, json=create_family_payload)
|
||||
resp.raise_for_status()
|
||||
create_result = resp.json()
|
||||
assert create_result['err'] == 0
|
||||
|
||||
# add requiered quotient for subscriptions
|
||||
data = read_family(conn, name_id)
|
||||
url = conn + '/update-quotient?NameID=%s&rl_id=%s' % (name_id, data['RL1']['num'])
|
||||
payload = {
|
||||
'yearRev': str(reference_year),
|
||||
'dateStart': '2023-05-15',
|
||||
'dateEnd': '3000-12-31',
|
||||
'mtt': '5000.0',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
print('\ncreate DUI again: %s' % str(create_result['data']['number']))
|
||||
data = diff_family(conn, name_id, 'test_create_family_out_town.json')
|
||||
|
||||
return {
|
||||
'name_id': name_id, # linked
|
||||
'family_id': str(create_result['data']['number']),
|
||||
'family_payload': create_family_payload,
|
||||
'lastname': lastname,
|
||||
'rl1_num': data['RL1']['num'],
|
||||
'rl2_num': data['RL2']['num'],
|
||||
'bart_num': data['childList'][0]['num'],
|
||||
'lisa_num': data['childList'][1]['num'],
|
||||
'maggie_num': data['childList'][2]['num'],
|
||||
'hugo_num': data['childList'][3]['num'],
|
||||
'data': data,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def update_data(request, conn):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
|
@ -616,22 +690,102 @@ def get_subscription_info(nature, activity_text, unit_text, place_text, con, nam
|
|||
}
|
||||
|
||||
|
||||
def get_loisirs_subscribe_info(con, data, year):
|
||||
return get_subscription_info(
|
||||
'LOISIRS',
|
||||
# Sigec made this loisirs activity available for functests
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
|
||||
'MERCREDI - 15h30/17h - 8/15Ans',
|
||||
'ARGOULETS',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_loisirs_subscribe_info3(con, data, year):
|
||||
return get_subscription_info(
|
||||
'LOISIRS',
|
||||
# Sigec made this loisirs activity available for functests
|
||||
'Vitrail Fusing 1/2 Je Adultes',
|
||||
'Inscription annuelle',
|
||||
'Centre Culturel ALBAN MINVILLE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_extrasco_subscribe_info(con, data, year):
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL ELEMENTAIRE Maourine Juin',
|
||||
'PUBLIK ADL ELEMENTAIRE Maourine JUIN 22/23(NE PAS UTILISER)',
|
||||
'MAOURINE (la) ELEMENTAIRE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_extrasco_subscribe_info2(con, data, year):
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL MATERNELLE Lardenne Juin',
|
||||
'PUBLIK ADL MATER JOURNEE AVEC REPAS',
|
||||
'LARDENNE MATERNELLE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_loisirs_subscribe_info(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info2(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_loisirs_subscribe_info(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info3(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_loisirs_subscribe_info3(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_extrasco_subscribe_info(conn, create_data, reference_year)
|
||||
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL ELEMENTAIRE Maourine Avril 2023',
|
||||
'ADL ELEMENTAIRE Maourine Avril 2023',
|
||||
'MAOURINE (la) ELEMENTAIRE',
|
||||
conn,
|
||||
create_data['name_id'],
|
||||
create_data['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info2(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_extrasco_subscribe_info2(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info3(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_extrasco_subscribe_info2(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
|
@ -645,11 +799,32 @@ def perisco_subscribe_info(conn, create_data, reference_year):
|
|||
return get_subscription_info(
|
||||
None,
|
||||
# Sigec made this peri-sco activity available for functests
|
||||
'TEMPS DU MIDI 22/23',
|
||||
'TEMPS DU MIDI 22/23',
|
||||
'DOLTO FRANCOISE MATERNELLE',
|
||||
'Temps du midi',
|
||||
'TEST TEMPS DU MIDI 22/23',
|
||||
'AMIDONNIERS ELEMENTAIRE',
|
||||
conn,
|
||||
create_data['name_id'],
|
||||
create_data['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def perisco_subscribe_adulte_info(conn, create_data2, reference_year):
|
||||
'''This fixture is a configuration trick from Sigec
|
||||
as peri-sco should not be available for subscription
|
||||
and as a consequence, should not be displayed from catalogs'''
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
return get_subscription_info(
|
||||
None,
|
||||
# Sigec made this peri-sco activity available for functests
|
||||
'RESTAURATION ADULTE',
|
||||
'TEST RESTAURATION ADULTE 22/23',
|
||||
'DOLTO FRANCOISE MATERNELLE',
|
||||
conn,
|
||||
create_data2['name_id'],
|
||||
create_data2['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
|
|
@ -7,6 +7,14 @@
|
|||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUTO_OUT",
|
||||
"code": "AUTO_OUT",
|
||||
"text": "Autorisation de sortie - CLAE",
|
||||
"libelle": "Autorisation de sortie - CLAE",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUTRE",
|
||||
"code": "AUTRE",
|
||||
|
@ -16,6 +24,30 @@
|
|||
"isActive": true,
|
||||
"note": "rebellious"
|
||||
},
|
||||
{
|
||||
"id": "AUT_OUTADL",
|
||||
"code": "AUT_OUTADL",
|
||||
"text": "Autorisation de sortie - ADL",
|
||||
"libelle": "Autorisation de sortie - ADL",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUT_SANT",
|
||||
"code": "AUT_SANT",
|
||||
"text": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"libelle": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUT_TRANS",
|
||||
"code": "AUT_TRANS",
|
||||
"text": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"libelle": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AVL",
|
||||
"code": "AVL",
|
||||
|
@ -27,8 +59,8 @@
|
|||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Assistant de Vie scolaire",
|
||||
"libelle": "Assistant de Vie scolaire ",
|
||||
"text": "Auxiliaire de Vie scolaire",
|
||||
"libelle": "Auxiliaire de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
|
@ -41,6 +73,14 @@
|
|||
"isActive": false,
|
||||
"note": null
|
||||
},
|
||||
{
|
||||
"id": "HPURG",
|
||||
"code": "HPURG",
|
||||
"text": "Hospitalisation / musures d'urgence",
|
||||
"libelle": "Hospitalisation / musures d'urgence",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "LENTILLE",
|
||||
"code": "LENTILLE",
|
||||
|
|
|
@ -27,8 +27,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
@ -42,7 +42,17 @@
|
|||
"profession": null,
|
||||
"CAFInfo": null,
|
||||
"indicatorList": [],
|
||||
"quotientList": [],
|
||||
"quotientList": [
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2022-09-01T00:00:00+02:00",
|
||||
"dateEnd": "3000-08-31T00:00:00+02:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
}
|
||||
],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "M\u00e8re"
|
||||
|
@ -62,7 +72,8 @@
|
|||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE"
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
|
@ -71,7 +82,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -93,7 +104,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -149,13 +160,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -0,0 +1,408 @@
|
|||
{
|
||||
"number": "N/A",
|
||||
"category": "BI",
|
||||
"situation": "MARI",
|
||||
"flagCom": false,
|
||||
"nbChild": 3,
|
||||
"nbTotalChild": 4,
|
||||
"nbAES": "1",
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
"lastname": "N/A",
|
||||
"maidenName": "BOUVIER",
|
||||
"quality": "MERE",
|
||||
"civility": "MME",
|
||||
"birth": {
|
||||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
"num": 742,
|
||||
"numComp": null,
|
||||
"street1": "Evergreen Terrace",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "90701"
|
||||
},
|
||||
"contact": {
|
||||
"phone": null,
|
||||
"mobile": null,
|
||||
"mail": null,
|
||||
"isContactMail": false,
|
||||
"isContactSms": false,
|
||||
"isInvoicePdf": false
|
||||
},
|
||||
"profession": null,
|
||||
"CAFInfo": null,
|
||||
"indicatorList": [],
|
||||
"quotientList": [
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2023-05-15T00:00:00+02:00",
|
||||
"dateEnd": "3000-12-31T00:00:00+01:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
}
|
||||
],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "M\u00e8re"
|
||||
},
|
||||
"RL2": {
|
||||
"num": "N/A",
|
||||
"firstname": "HOMER",
|
||||
"lastname": "N/A",
|
||||
"maidenName": null,
|
||||
"quality": "PERE",
|
||||
"civility": "MR",
|
||||
"birth": {
|
||||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
"num": 4,
|
||||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
"phone": "0122222222",
|
||||
"mobile": "0622222222",
|
||||
"mail": "homer.simpson@example.org.com",
|
||||
"isContactMail": true,
|
||||
"isContactSms": true,
|
||||
"isInvoicePdf": true
|
||||
},
|
||||
"profession": {
|
||||
"codeCSP": "46",
|
||||
"profession": "Inspecteur de s\u00e9curit\u00e9",
|
||||
"employerName": "Burns",
|
||||
"phone": "0133333333",
|
||||
"addressPro": {
|
||||
"num": null,
|
||||
"street": null,
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
},
|
||||
"situation": null,
|
||||
"weeklyHours": null,
|
||||
"codeCSP_text": "EMPLOYES"
|
||||
},
|
||||
"CAFInfo": {
|
||||
"number": "123",
|
||||
"organ": "GENE",
|
||||
"organ_text": "CAF 31"
|
||||
},
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "AVL",
|
||||
"libelle": "Auxiliaire de Vie loisirs",
|
||||
"note": null,
|
||||
"choice": null,
|
||||
"code_text": "Auxiliaire de Vie loisirs"
|
||||
},
|
||||
{
|
||||
"code": "ETABSPEC",
|
||||
"libelle": "Etablissement sp\u00e9cialis\u00e9",
|
||||
"note": "SNPP",
|
||||
"choice": null,
|
||||
"code_text": "Etablissement sp\u00e9cialis\u00e9"
|
||||
}
|
||||
],
|
||||
"quotientList": [],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MONSIEUR",
|
||||
"quality_text": "P\u00e8re"
|
||||
},
|
||||
"quotientList": [],
|
||||
"childList": [
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "BART",
|
||||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2014-04-01T00:00:00+02:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
"bLeaveAlone": true,
|
||||
"authorizedPersonList": [
|
||||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
"sexe": "M",
|
||||
"contact": {
|
||||
"phone": "0312345678",
|
||||
"mobile": null,
|
||||
"mail": "abe.simpson@example.org"
|
||||
},
|
||||
"civility_text": "MONSIEUR",
|
||||
"sexe_text": "Masculin"
|
||||
},
|
||||
"personQuality": {
|
||||
"code": "13",
|
||||
"libelle": "Famille",
|
||||
"code_text": "Famille"
|
||||
}
|
||||
},
|
||||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
"sexe": "F",
|
||||
"contact": {
|
||||
"phone": "0412345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "mona.simpson@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
},
|
||||
"personQuality": {
|
||||
"code": "13",
|
||||
"libelle": "Famille",
|
||||
"code_text": "Famille"
|
||||
}
|
||||
}
|
||||
],
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "AUTRE",
|
||||
"libelle": "Autre",
|
||||
"note": "rebellious",
|
||||
"choice": null,
|
||||
"code_text": "Autre"
|
||||
},
|
||||
{
|
||||
"code": "LUNETTE",
|
||||
"libelle": "Port de lunettes",
|
||||
"note": null,
|
||||
"choice": null,
|
||||
"code_text": "Port de lunettes"
|
||||
}
|
||||
],
|
||||
"medicalRecord": {
|
||||
"familyDoctor": {
|
||||
"name": "MONROE",
|
||||
"phone": "0612341234",
|
||||
"address": {
|
||||
"street1": "Alameda",
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
}
|
||||
},
|
||||
"allergy1": "butterscotch, imitation butterscotch, glow-in-the-dark monster make-up",
|
||||
"allergy2": "shrimp and cauliflower",
|
||||
"comment1": "the shrimp allergy isn't fully identified",
|
||||
"comment2": null,
|
||||
"observ1": "Ay Caramba!",
|
||||
"observ2": "Eat my shorts!",
|
||||
"isAuthHospital": false,
|
||||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_01",
|
||||
"dateDeb": "2022-09-01T00:00:00+02:00",
|
||||
"dateFin": "2023-07-01T00:00:00+02:00",
|
||||
"description": "mischievous, rebellious, misunderstood, disruptive",
|
||||
"code_text": "PAI Alimentaire Int\u00e9gral"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "Masculin",
|
||||
"dietcode_text": "Avec viande"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "LISA",
|
||||
"sexe": "F",
|
||||
"birth": {
|
||||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_SV",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_02",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Partiel"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "F\u00e9minin",
|
||||
"dietcode_text": "Sans viande"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "MAGGIE",
|
||||
"sexe": "F",
|
||||
"birth": {
|
||||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_02",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Partiel"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "F\u00e9minin",
|
||||
"dietcode_text": "Panier PAI"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "HUGO",
|
||||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_01",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Int\u00e9gral"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "Masculin",
|
||||
"dietcode_text": "Avec viande"
|
||||
}
|
||||
],
|
||||
"emergencyPersonList": [
|
||||
{
|
||||
"numPerson": "N/A",
|
||||
"civility": "MME",
|
||||
"firstname": "PATTY",
|
||||
"lastname": "BOUVIER",
|
||||
"dateBirth": "1948-08-30T00:00:00+01:00",
|
||||
"sexe": "F",
|
||||
"quality": "13",
|
||||
"contact": {
|
||||
"phone": "0112345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "patty.bouvier@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "Famille",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
},
|
||||
{
|
||||
"numPerson": "N/A",
|
||||
"civility": "MME",
|
||||
"firstname": "SELMA",
|
||||
"lastname": "BOUVIER",
|
||||
"dateBirth": "1946-04-29T00:00:00+01:00",
|
||||
"sexe": "F",
|
||||
"quality": "13",
|
||||
"contact": {
|
||||
"phone": "0112345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "selma.bouvier@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "Famille",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
}
|
||||
],
|
||||
"indicatorList": [],
|
||||
"childErrorList": [],
|
||||
"category_text": "BIPARENTALE",
|
||||
"situation_text": "MARIE(E)",
|
||||
"family_id": "N/A"
|
||||
}
|
|
@ -12,7 +12,8 @@
|
|||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE"
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
|
|
@ -0,0 +1,125 @@
|
|||
[
|
||||
{
|
||||
"id": "INDI_APE_ENF",
|
||||
"text": "INDI_APE_ENF",
|
||||
"level": "INDI_APE_ENF",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO3",
|
||||
"libelle": "CF-0/1 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HBOTH",
|
||||
"libelle": "SP-handicap parent et fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HPAR",
|
||||
"libelle": "SP-handicap parents",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_MULTIACC",
|
||||
"libelle": "CF-2 enfants \u00e0 accueillir",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_SITUP",
|
||||
"libelle": "SP-situation particuli\u00e8re personne",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "INDI_APE_FAM",
|
||||
"text": "INDI_APE_FAM",
|
||||
"level": "INDI_APE_FAM",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO2",
|
||||
"libelle": "CF-1/2 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_COMPO4",
|
||||
"libelle": "CF-0/2 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_FIRSTC",
|
||||
"libelle": "CF-premier enfant",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HAND",
|
||||
"libelle": "H-handicap ou maladie chronique",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_NAIM",
|
||||
"libelle": "CF-naissance multiple",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "INDI_APE_RES",
|
||||
"text": "INDI_APE_RES",
|
||||
"level": "INDI_APE_RES",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO1",
|
||||
"libelle": "CF-100% actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_FRAT",
|
||||
"libelle": "CF-Fratrie d\u00e9j\u00e0 en accueil",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_KOFRAT",
|
||||
"libelle": "CF-sans proposition pour une partie de la fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HFRAT",
|
||||
"libelle": "SP-handicap fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_SPLOG",
|
||||
"libelle": "SP-situation particuli\u00e8re logement",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_ALLO",
|
||||
"libelle": "SP-accompagnement enfant allophone",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE-MINE",
|
||||
"libelle": "SP-parent mineur",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
|
@ -1,9 +1,17 @@
|
|||
[
|
||||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Assistant de Vie scolaire",
|
||||
"libelle": "Assistant de Vie scolaire ",
|
||||
"id": "AUT_OUTADL",
|
||||
"code": "AUT_OUTADL",
|
||||
"text": "Autorisation de sortie - ADL",
|
||||
"libelle": "Autorisation de sortie - ADL",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUTO_OUT",
|
||||
"code": "AUTO_OUT",
|
||||
"text": "Autorisation de sortie - CLAE",
|
||||
"libelle": "Autorisation de sortie - CLAE",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
|
@ -23,6 +31,14 @@
|
|||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Auxiliaire de Vie scolaire",
|
||||
"libelle": "Auxiliaire de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "ETABSPEC",
|
||||
"code": "ETABSPEC",
|
||||
|
@ -31,6 +47,30 @@
|
|||
"typeDesc": "NOTE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "HPURG",
|
||||
"code": "HPURG",
|
||||
"text": "Hospitalisation / musures d'urgence",
|
||||
"libelle": "Hospitalisation / musures d'urgence",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUT_SANT",
|
||||
"code": "AUT_SANT",
|
||||
"text": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"libelle": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUT_TRANS",
|
||||
"code": "AUT_TRANS",
|
||||
"text": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"libelle": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "MDPH",
|
||||
"code": "MDPH",
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
[
|
||||
{
|
||||
"id": "MORAL",
|
||||
"code": "MORAL",
|
||||
"text": "",
|
||||
"libelle": null
|
||||
},
|
||||
{
|
||||
"id": "MME",
|
||||
"code": "MME",
|
||||
|
@ -10,11 +16,5 @@
|
|||
"code": "MR",
|
||||
"text": "MONSIEUR",
|
||||
"libelle": "MONSIEUR"
|
||||
},
|
||||
{
|
||||
"id": "MORAL",
|
||||
"code": "MORAL",
|
||||
"text": "MORAL",
|
||||
"libelle": "MORAL"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -1,4 +1,11 @@
|
|||
[
|
||||
{
|
||||
"id": "87",
|
||||
"code": "87",
|
||||
"rang": "PERSON",
|
||||
"text": "Acte de d\u00e9c\u00e8s",
|
||||
"libelle": "Acte de d\u00e9c\u00e8s"
|
||||
},
|
||||
{
|
||||
"id": "43",
|
||||
"code": "43",
|
||||
|
@ -188,6 +195,13 @@
|
|||
"text": "Certificat de scolarit\u00e9",
|
||||
"libelle": "Certificat de scolarit\u00e9"
|
||||
},
|
||||
{
|
||||
"id": "93",
|
||||
"code": "93",
|
||||
"rang": "PERSON",
|
||||
"text": "Certificat de travail",
|
||||
"libelle": "Certificat de travail"
|
||||
},
|
||||
{
|
||||
"id": "74",
|
||||
"code": "74",
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
[
|
||||
{
|
||||
"id": "05DERO-8",
|
||||
"code": "05DERO-8",
|
||||
"text": "DERO05 - SANTE",
|
||||
"libelle": "DERO05 - SANTE"
|
||||
},
|
||||
{
|
||||
"id": "05DERO-6",
|
||||
"code": "05DERO-6",
|
||||
"text": "DERO05 - SANTE : SANTE / ORGANISATION",
|
||||
"libelle": "DERO05 - SANTE : SANTE / ORGANISATION"
|
||||
},
|
||||
{
|
||||
"id": "10DERO-2",
|
||||
"code": "10DERO-2",
|
||||
"text": "DERO10 - ORGANISATION",
|
||||
"libelle": "DERO10 - ORGANISATION"
|
||||
},
|
||||
{
|
||||
"id": "11DERO-1",
|
||||
"code": "11DERO-1",
|
||||
"text": "DERO11 - AUTRE",
|
||||
"libelle": "DERO11 - AUTRE"
|
||||
}
|
||||
]
|
|
@ -0,0 +1,56 @@
|
|||
[
|
||||
{
|
||||
"id": 102,
|
||||
"code": 102,
|
||||
"text": "CANTINE / CLAE",
|
||||
"libelle": "CANTINE / CLAE"
|
||||
},
|
||||
{
|
||||
"id": 103,
|
||||
"code": 103,
|
||||
"text": "CCAS",
|
||||
"libelle": "CCAS"
|
||||
},
|
||||
{
|
||||
"id": 101,
|
||||
"code": 101,
|
||||
"text": "DASC",
|
||||
"libelle": "DASC"
|
||||
},
|
||||
{
|
||||
"id": 104,
|
||||
"code": 104,
|
||||
"text": "DSCS",
|
||||
"libelle": "DSCS"
|
||||
},
|
||||
{
|
||||
"id": 105,
|
||||
"code": 105,
|
||||
"text": "ENFANCE LOISIRS",
|
||||
"libelle": "ENFANCE LOISIRS"
|
||||
},
|
||||
{
|
||||
"id": 106,
|
||||
"code": 106,
|
||||
"text": "PARCOURS EDUCATIFS",
|
||||
"libelle": "PARCOURS EDUCATIFS"
|
||||
},
|
||||
{
|
||||
"id": 107,
|
||||
"code": 107,
|
||||
"text": "REMBOURSEMENT",
|
||||
"libelle": "REMBOURSEMENT"
|
||||
},
|
||||
{
|
||||
"id": 108,
|
||||
"code": 108,
|
||||
"text": "SENIORS",
|
||||
"libelle": "SENIORS"
|
||||
},
|
||||
{
|
||||
"id": 109,
|
||||
"code": 109,
|
||||
"text": "SPORT",
|
||||
"libelle": "SPORT"
|
||||
}
|
||||
]
|
|
@ -1,12 +1,4 @@
|
|||
[
|
||||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Assistant de Vie scolaire",
|
||||
"libelle": "Assistant de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AVL",
|
||||
"code": "AVL",
|
||||
|
@ -15,6 +7,14 @@
|
|||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Auxiliaire de Vie scolaire",
|
||||
"libelle": "Auxiliaire de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "ETABSPEC",
|
||||
"code": "ETABSPEC",
|
||||
|
|
|
@ -0,0 +1,92 @@
|
|||
[
|
||||
{
|
||||
"id": "CE1",
|
||||
"age": 7,
|
||||
"code": "CE1",
|
||||
"text": "Cours \u00e9l\u00e9mentaire 1",
|
||||
"nature": null,
|
||||
"libelle": "Cours \u00e9l\u00e9mentaire 1",
|
||||
"numOrder": "6",
|
||||
"nextLevelCode": "CE2"
|
||||
},
|
||||
{
|
||||
"id": "CE2",
|
||||
"age": 8,
|
||||
"code": "CE2",
|
||||
"text": "Cours \u00e9l\u00e9mentaire 2",
|
||||
"nature": null,
|
||||
"libelle": "Cours \u00e9l\u00e9mentaire 2",
|
||||
"numOrder": "7",
|
||||
"nextLevelCode": "CM1"
|
||||
},
|
||||
{
|
||||
"id": "CM1",
|
||||
"age": 9,
|
||||
"code": "CM1",
|
||||
"text": "Cours moyen 1",
|
||||
"nature": null,
|
||||
"libelle": "Cours moyen 1",
|
||||
"numOrder": "8",
|
||||
"nextLevelCode": "CM2"
|
||||
},
|
||||
{
|
||||
"id": "CM2",
|
||||
"age": 10,
|
||||
"code": "CM2",
|
||||
"text": "Cours moyen 2",
|
||||
"nature": null,
|
||||
"libelle": "Cours moyen 2",
|
||||
"numOrder": "9",
|
||||
"nextLevelCode": null
|
||||
},
|
||||
{
|
||||
"id": "CP",
|
||||
"age": 6,
|
||||
"code": "CP",
|
||||
"text": "Cours pr\u00e9paratoire",
|
||||
"nature": null,
|
||||
"libelle": "Cours pr\u00e9paratoire",
|
||||
"numOrder": "5",
|
||||
"nextLevelCode": "CE1"
|
||||
},
|
||||
{
|
||||
"id": "GS",
|
||||
"age": 5,
|
||||
"code": "GS",
|
||||
"text": "Section grand",
|
||||
"nature": null,
|
||||
"libelle": "Section grand",
|
||||
"numOrder": "4",
|
||||
"nextLevelCode": "CP"
|
||||
},
|
||||
{
|
||||
"id": "MS",
|
||||
"age": 4,
|
||||
"code": "MS",
|
||||
"text": "Section moyen",
|
||||
"nature": null,
|
||||
"libelle": "Section moyen",
|
||||
"numOrder": "3",
|
||||
"nextLevelCode": "GS"
|
||||
},
|
||||
{
|
||||
"id": "PS",
|
||||
"age": 3,
|
||||
"code": "PS",
|
||||
"text": "Section petit",
|
||||
"nature": null,
|
||||
"libelle": "Section petit",
|
||||
"numOrder": "2",
|
||||
"nextLevelCode": "MS"
|
||||
},
|
||||
{
|
||||
"id": "TPS",
|
||||
"age": 2,
|
||||
"code": "TPS",
|
||||
"text": "Section tout petit",
|
||||
"nature": null,
|
||||
"libelle": "Section tout petit",
|
||||
"numOrder": "1",
|
||||
"nextLevelCode": "PS"
|
||||
}
|
||||
]
|
|
@ -0,0 +1,20 @@
|
|||
[
|
||||
{
|
||||
"id": 2022,
|
||||
"text": "2022",
|
||||
"schoolYear": 2022,
|
||||
"dateEndYearSchool": "2023-07-07T00:00:00+02:00",
|
||||
"dateStartYearSchool": "2022-09-01T00:00:00+02:00",
|
||||
"dateEndSubscribeSchool": "2023-09-01T00:00:00+02:00",
|
||||
"dateStartSubscribeSchool": "2022-09-01T00:00:00+02:00"
|
||||
},
|
||||
{
|
||||
"id": 2023,
|
||||
"text": "2023",
|
||||
"schoolYear": 2023,
|
||||
"dateEndYearSchool": "2024-07-07T00:00:00+02:00",
|
||||
"dateStartYearSchool": "2023-09-04T00:00:00+02:00",
|
||||
"dateEndSubscribeSchool": "2023-09-01T00:00:00+02:00",
|
||||
"dateStartSubscribeSchool": "2022-09-01T00:00:00+02:00"
|
||||
}
|
||||
]
|
|
@ -1,33 +1,9 @@
|
|||
[
|
||||
{
|
||||
"id": "105",
|
||||
"code": "105",
|
||||
"text": "AUTRE",
|
||||
"libelle": "AUTRE"
|
||||
},
|
||||
{
|
||||
"id": "30",
|
||||
"code": "30",
|
||||
"text": "B.C.G.",
|
||||
"libelle": "B.C.G."
|
||||
},
|
||||
{
|
||||
"id": "56",
|
||||
"code": "56",
|
||||
"text": "BOOSTRIX",
|
||||
"libelle": "BOOSTRIX"
|
||||
},
|
||||
{
|
||||
"id": "27",
|
||||
"code": "27",
|
||||
"text": "CHOLERA",
|
||||
"libelle": "CHOLERA"
|
||||
},
|
||||
{
|
||||
"id": "48",
|
||||
"code": "48",
|
||||
"text": "Contr\u00f4le B.C.G.",
|
||||
"libelle": "Contr\u00f4le B.C.G."
|
||||
"text": "BCG",
|
||||
"libelle": "BCG"
|
||||
},
|
||||
{
|
||||
"id": "3",
|
||||
|
@ -41,107 +17,17 @@
|
|||
"text": "DIPHTERIE",
|
||||
"libelle": "DIPHTERIE"
|
||||
},
|
||||
{
|
||||
"id": "6",
|
||||
"code": "6",
|
||||
"text": "DIPHTERIE TETANOS",
|
||||
"libelle": "DIPHTERIE TETANOS"
|
||||
},
|
||||
{
|
||||
"id": "9",
|
||||
"code": "9",
|
||||
"text": "DIPHT TETANOS COQ",
|
||||
"libelle": "DIPHT TETANOS COQ"
|
||||
},
|
||||
{
|
||||
"id": "19",
|
||||
"code": "19",
|
||||
"text": "DT BISRUDIVAX",
|
||||
"libelle": "DT BISRUDIVAX"
|
||||
},
|
||||
{
|
||||
"id": "10",
|
||||
"code": "10",
|
||||
"text": "DT COQ POLIO",
|
||||
"libelle": "DT COQ POLIO"
|
||||
},
|
||||
{
|
||||
"id": "13",
|
||||
"code": "13",
|
||||
"text": "DT COQ POLIO IPAD",
|
||||
"libelle": "DT COQ POLIO IPAD"
|
||||
},
|
||||
{
|
||||
"id": "8",
|
||||
"code": "8",
|
||||
"text": "DT POLIO",
|
||||
"libelle": "DT POLIO"
|
||||
},
|
||||
{
|
||||
"id": "45",
|
||||
"code": "45",
|
||||
"text": "DT TETANOS COQ",
|
||||
"libelle": "DT TETANOS COQ"
|
||||
},
|
||||
{
|
||||
"id": "11",
|
||||
"code": "11",
|
||||
"text": "DT TYPHOIDE",
|
||||
"libelle": "DT TYPHOIDE"
|
||||
},
|
||||
{
|
||||
"id": "129",
|
||||
"code": "129",
|
||||
"text": "ENGERIX",
|
||||
"libelle": "ENGERIX"
|
||||
},
|
||||
{
|
||||
"id": "26",
|
||||
"code": "26",
|
||||
"text": "FIEVRE JAUNE",
|
||||
"libelle": "FIEVRE JAUNE"
|
||||
},
|
||||
{
|
||||
"id": "4",
|
||||
"code": "4",
|
||||
"text": "F.TYPHOIDES",
|
||||
"libelle": "F.TYPHOIDES"
|
||||
},
|
||||
{
|
||||
"id": "144",
|
||||
"code": "144",
|
||||
"text": "GRIPPE",
|
||||
"libelle": "GRIPPE"
|
||||
},
|
||||
{
|
||||
"id": "143",
|
||||
"code": "143",
|
||||
"text": "HAEMOPHILUS HIB",
|
||||
"libelle": "HAEMOPHILUS HIB"
|
||||
},
|
||||
{
|
||||
"id": "17",
|
||||
"code": "17",
|
||||
"text": "HAVRIX",
|
||||
"libelle": "HAVRIX"
|
||||
"text": "DTPOLIO",
|
||||
"libelle": "DTPOLIO"
|
||||
},
|
||||
{
|
||||
"id": "29",
|
||||
"code": "29",
|
||||
"text": "HEPATITE B",
|
||||
"libelle": "HEPATITE B"
|
||||
},
|
||||
{
|
||||
"id": "146",
|
||||
"code": "146",
|
||||
"text": "HEXAXIM",
|
||||
"libelle": "HEXAXIM"
|
||||
},
|
||||
{
|
||||
"id": "59",
|
||||
"code": "59",
|
||||
"text": "HEXYON",
|
||||
"libelle": "HEXYON"
|
||||
"text": "HEPATITEB",
|
||||
"libelle": "HEPATITEB"
|
||||
},
|
||||
{
|
||||
"id": "16",
|
||||
|
@ -150,226 +36,28 @@
|
|||
"libelle": "HIB"
|
||||
},
|
||||
{
|
||||
"id": "24",
|
||||
"code": "24",
|
||||
"text": "IMOVAX OREILLONS",
|
||||
"libelle": "IMOVAX OREILLONS"
|
||||
"id": "152",
|
||||
"code": "152",
|
||||
"text": "IIP",
|
||||
"libelle": "IIP"
|
||||
},
|
||||
{
|
||||
"id": "121",
|
||||
"code": "121",
|
||||
"text": "INFANRIX",
|
||||
"libelle": "INFANRIX"
|
||||
"id": "151",
|
||||
"code": "151",
|
||||
"text": "MENINGOCOQUE",
|
||||
"libelle": "MENINGOCOQUE"
|
||||
},
|
||||
{
|
||||
"id": "52",
|
||||
"code": "52",
|
||||
"text": "INFANRIX HEXA",
|
||||
"libelle": "INFANRIX HEXA"
|
||||
},
|
||||
{
|
||||
"id": "32",
|
||||
"code": "32",
|
||||
"text": "INFANRIX POLIO",
|
||||
"libelle": "INFANRIX POLIO"
|
||||
},
|
||||
{
|
||||
"id": "33",
|
||||
"code": "33",
|
||||
"text": "INFANRIX POLIO HIB",
|
||||
"libelle": "INFANRIX POLIO HIB"
|
||||
},
|
||||
{
|
||||
"id": "51",
|
||||
"code": "51",
|
||||
"text": "INFANRIX QUINTA",
|
||||
"libelle": "INFANRIX QUINTA"
|
||||
},
|
||||
{
|
||||
"id": "55",
|
||||
"code": "55",
|
||||
"text": "INFANRIX TETRA",
|
||||
"libelle": "INFANRIX TETRA"
|
||||
},
|
||||
{
|
||||
"id": "147",
|
||||
"code": "147",
|
||||
"text": "INFLUVAC TETRA",
|
||||
"libelle": "INFLUVAC TETRA"
|
||||
},
|
||||
{
|
||||
"id": "137",
|
||||
"code": "137",
|
||||
"text": "INNUGRIP",
|
||||
"libelle": "INNUGRIP"
|
||||
},
|
||||
{
|
||||
"id": "18",
|
||||
"code": "18",
|
||||
"text": "LEPTOSPIROSE",
|
||||
"libelle": "LEPTOSPIROSE"
|
||||
},
|
||||
{
|
||||
"id": "22",
|
||||
"code": "22",
|
||||
"text": "MENINGITE",
|
||||
"libelle": "MENINGITE"
|
||||
},
|
||||
{
|
||||
"id": "130",
|
||||
"code": "130",
|
||||
"text": "MENINGITEC",
|
||||
"libelle": "MENINGITEC"
|
||||
},
|
||||
{
|
||||
"id": "123",
|
||||
"code": "123",
|
||||
"text": "MENINVAC",
|
||||
"libelle": "MENINVAC"
|
||||
},
|
||||
{
|
||||
"id": "120",
|
||||
"code": "120",
|
||||
"text": "MENINVACT",
|
||||
"libelle": "MENINVACT"
|
||||
},
|
||||
{
|
||||
"id": "139",
|
||||
"code": "139",
|
||||
"text": "MENJUGATE",
|
||||
"libelle": "MENJUGATE"
|
||||
},
|
||||
{
|
||||
"id": "149",
|
||||
"code": "149",
|
||||
"text": "M-M RVAX PRO",
|
||||
"libelle": "M-M RVAX PRO"
|
||||
},
|
||||
{
|
||||
"id": "133",
|
||||
"code": "133",
|
||||
"text": "MONOTEST",
|
||||
"libelle": "MONOTEST"
|
||||
},
|
||||
{
|
||||
"id": "124",
|
||||
"code": "124",
|
||||
"text": "MONOVAX",
|
||||
"libelle": "MONOVAX"
|
||||
},
|
||||
{
|
||||
"id": "132",
|
||||
"code": "132",
|
||||
"text": "NEISVAC",
|
||||
"libelle": "NEISVAC"
|
||||
},
|
||||
{
|
||||
"id": "110",
|
||||
"code": "110",
|
||||
"text": "OTITE",
|
||||
"libelle": "OTITE"
|
||||
},
|
||||
{
|
||||
"id": "134",
|
||||
"code": "134",
|
||||
"text": "PANENZA",
|
||||
"libelle": "PANENZA"
|
||||
},
|
||||
{
|
||||
"id": "31",
|
||||
"code": "31",
|
||||
"text": "PENTACOQ",
|
||||
"libelle": "PENTACOQ"
|
||||
},
|
||||
{
|
||||
"id": "53",
|
||||
"code": "53",
|
||||
"text": "PENTAVAC",
|
||||
"libelle": "PENTAVAC"
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
"code": "2",
|
||||
"text": "POLIOMYELITE",
|
||||
"libelle": "POLIOMYELITE"
|
||||
},
|
||||
{
|
||||
"id": "128",
|
||||
"code": "128",
|
||||
"text": "PREVENAR",
|
||||
"libelle": "PREVENAR"
|
||||
},
|
||||
{
|
||||
"id": "125",
|
||||
"code": "125",
|
||||
"text": "PRIORIX",
|
||||
"libelle": "PRIORIX"
|
||||
},
|
||||
{
|
||||
"id": "54",
|
||||
"code": "54",
|
||||
"text": "REPEVAX",
|
||||
"libelle": "REPEVAX"
|
||||
},
|
||||
{
|
||||
"id": "47",
|
||||
"code": "47",
|
||||
"text": "REVAXIS",
|
||||
"libelle": "REVAXIS"
|
||||
"id": "150",
|
||||
"code": "150",
|
||||
"text": "POLIO",
|
||||
"libelle": "POLIO"
|
||||
},
|
||||
{
|
||||
"id": "28",
|
||||
"code": "28",
|
||||
"text": "R O R",
|
||||
"libelle": "R O R"
|
||||
},
|
||||
{
|
||||
"id": "127",
|
||||
"code": "127",
|
||||
"text": "ROR VAX",
|
||||
"libelle": "ROR VAX"
|
||||
},
|
||||
{
|
||||
"id": "135",
|
||||
"code": "135",
|
||||
"text": "ROTARIX",
|
||||
"libelle": "ROTARIX"
|
||||
},
|
||||
{
|
||||
"id": "20",
|
||||
"code": "20",
|
||||
"text": "ROUVAX",
|
||||
"libelle": "ROUVAX"
|
||||
},
|
||||
{
|
||||
"id": "23",
|
||||
"code": "23",
|
||||
"text": "RUDI ROUVAX",
|
||||
"libelle": "RUDI ROUVAX"
|
||||
},
|
||||
{
|
||||
"id": "21",
|
||||
"code": "21",
|
||||
"text": "RUDIVAX",
|
||||
"libelle": "RUDIVAX"
|
||||
},
|
||||
{
|
||||
"id": "113",
|
||||
"code": "113",
|
||||
"text": "SCARLATINE",
|
||||
"libelle": "SCARLATINE"
|
||||
},
|
||||
{
|
||||
"id": "14",
|
||||
"code": "14",
|
||||
"text": "SERUM ANTI-TETANIQUE",
|
||||
"libelle": "SERUM ANTI-TETANIQUE"
|
||||
},
|
||||
{
|
||||
"id": "141",
|
||||
"code": "141",
|
||||
"text": "SYNAGIS",
|
||||
"libelle": "SYNAGIS"
|
||||
"text": "ROR",
|
||||
"libelle": "ROR"
|
||||
},
|
||||
{
|
||||
"id": "1",
|
||||
|
@ -377,46 +65,10 @@
|
|||
"text": "TETANOS",
|
||||
"libelle": "TETANOS"
|
||||
},
|
||||
{
|
||||
"id": "7",
|
||||
"code": "7",
|
||||
"text": "TETANOS POLIO",
|
||||
"libelle": "TETANOS POLIO"
|
||||
},
|
||||
{
|
||||
"id": "12",
|
||||
"code": "12",
|
||||
"text": "TETRA COQ",
|
||||
"libelle": "TETRA COQ"
|
||||
},
|
||||
{
|
||||
"id": "46",
|
||||
"code": "46",
|
||||
"text": "TETRAVAC ACELLULAIRE",
|
||||
"libelle": "TETRAVAC ACELLULAIRE"
|
||||
},
|
||||
{
|
||||
"id": "107",
|
||||
"code": "107",
|
||||
"text": "VARICELLE",
|
||||
"libelle": "VARICELLE"
|
||||
},
|
||||
{
|
||||
"id": "15",
|
||||
"code": "15",
|
||||
"text": "VARIOLE",
|
||||
"libelle": "VARIOLE"
|
||||
},
|
||||
{
|
||||
"id": "34",
|
||||
"code": "34",
|
||||
"text": "VAXELIS",
|
||||
"libelle": "VAXELIS"
|
||||
},
|
||||
{
|
||||
"id": "148",
|
||||
"code": "148",
|
||||
"text": "VAXIGRIP",
|
||||
"libelle": "VAXIGRIP"
|
||||
"text": "TETRACOQ",
|
||||
"libelle": "TETRACOQ"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -10,8 +10,8 @@
|
|||
{
|
||||
"id": "AVS",
|
||||
"code": "AVS",
|
||||
"text": "Assistant de Vie scolaire",
|
||||
"libelle": "Assistant de Vie scolaire ",
|
||||
"text": "Auxiliaire de Vie scolaire",
|
||||
"libelle": "Auxiliaire de Vie scolaire ",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -40,7 +40,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -96,13 +96,13 @@
|
|||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
{
|
||||
"familyDoctor": {
|
||||
"name": "HIBBERT",
|
||||
"phone": "0656785678",
|
||||
"address": {
|
||||
"street1": "General Hospital",
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
}
|
||||
},
|
||||
"allergy1": null,
|
||||
"allergy2": null,
|
||||
"comment1": null,
|
||||
"comment2": null,
|
||||
"observ1": null,
|
||||
"observ2": null,
|
||||
"isAuthHospital": true,
|
||||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -10,13 +10,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
|
|
@ -27,8 +27,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
@ -61,7 +61,8 @@
|
|||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE"
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
@ -135,7 +136,8 @@
|
|||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE"
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
|
@ -144,7 +146,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -166,7 +168,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -222,13 +224,13 @@
|
|||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
"number": "N/A",
|
||||
"category": "AUTR",
|
||||
"situation": "AUTR",
|
||||
"flagCom": true,
|
||||
"flagCom": false,
|
||||
"nbChild": 0,
|
||||
"nbTotalChild": 0,
|
||||
"nbAES": "0",
|
||||
|
@ -153,13 +153,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -4,24 +4,33 @@
|
|||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "1",
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-01T00:00:00+01:00",
|
||||
"dateEnd": "2022-01-01T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "1",
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2022-09-01T00:00:00+02:00",
|
||||
"dateEnd": "3000-08-31T00:00:00+02:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
|
|
|
@ -18,8 +18,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
|
|
@ -5,8 +5,9 @@ from .conftest import diff
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"ref",
|
||||
'ref',
|
||||
[
|
||||
'ape-indicators',
|
||||
'category',
|
||||
'child-indicator',
|
||||
'civility',
|
||||
|
@ -15,11 +16,16 @@ from .conftest import diff
|
|||
'csp',
|
||||
'dietcode',
|
||||
'document',
|
||||
'exemption-reasons',
|
||||
#'nursery',
|
||||
'organ',
|
||||
'pai',
|
||||
'quality',
|
||||
'quotient',
|
||||
#'regie',
|
||||
'rl-indicator',
|
||||
'school-levels',
|
||||
'school-years',
|
||||
'situation',
|
||||
'street',
|
||||
'vaccin',
|
||||
|
@ -35,5 +41,5 @@ def test_referentials(conn, referentials, ref):
|
|||
for item in res['data']:
|
||||
assert 'id' in item
|
||||
assert 'text' in item
|
||||
if ref not in ['street', 'county']:
|
||||
if ref not in ['street', 'county', 'nursery']:
|
||||
assert diff(res['data'], 'test_read_%s_list.json' % ref)
|
||||
|
|
|
@ -36,7 +36,7 @@ def test_link(conn, update_data):
|
|||
res = resp.json()
|
||||
assert res['err'] == 1
|
||||
assert res['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
assert "E02 : Le dossier numéro [999999] ne correspond à aucune famille" in res['err_desc']
|
||||
assert 'E02 : Le dossier numéro [999999] ne correspond à aucune famille' in res['err_desc']
|
||||
|
||||
# wrong DUI firstname
|
||||
payload = {
|
||||
|
|
|
@ -15,7 +15,7 @@ FAMILY_RESET_PAYLOAD = {
|
|||
'rl1': {
|
||||
'civility': 'MR', # no effect
|
||||
'firstname': 'Marge', # must be
|
||||
'lastname': 'Simpson', # must be
|
||||
'lastname': 'Test_Simpson', # must be
|
||||
'maidenName': 'reset', # no effect
|
||||
'quality': 'AU',
|
||||
'birth': {
|
||||
|
@ -27,7 +27,7 @@ FAMILY_RESET_PAYLOAD = {
|
|||
'rl2': {
|
||||
'civility': 'MME', # no effect
|
||||
'firstname': 'Homer', # must be
|
||||
'lastname': 'Simpson', # must be
|
||||
'lastname': 'Test_Simpson', # must be
|
||||
'quality': 'AU',
|
||||
'birth': {
|
||||
'dateBirth': '1956-05-12', # must be
|
||||
|
@ -236,13 +236,18 @@ def test_update_family(conn, update_data):
|
|||
|
||||
def test_create_family(conn, create_data, update_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
# search the 'Test_Simpson' default test family
|
||||
resp = requests.get(conn + '/search-family?q=Test_Simpson')
|
||||
resp.raise_for_status()
|
||||
assert len(resp.json()['data']) >= 1
|
||||
assert any(data['RL1']['lastname'] == 'TEST_SIMPSON' for data in resp.json()['data'])
|
||||
|
||||
url = conn + '/create-family?NameID=%s' % create_data['name_id']
|
||||
|
||||
# RL1 already exists (on update_data) error
|
||||
unlink(conn, create_data['name_id'])
|
||||
payload = copy.deepcopy(create_data['family_payload'])
|
||||
payload['rl1']['lastname'] = 'Simpson'
|
||||
payload['rl1']['lastname'] = 'Test_Simpson'
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
@ -263,7 +268,7 @@ def test_create_family(conn, create_data, update_data):
|
|||
|
||||
def test_is_rl_exists(conn, update_data):
|
||||
url = conn + '/is-rl-exists'
|
||||
payload = {'firstname': 'Marge', 'lastname': 'Simpson', 'dateBirth': '1950-10-01'}
|
||||
payload = {'firstname': 'Marge', 'lastname': 'Test_Simpson', 'dateBirth': '1950-10-01'}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'err': 0, 'data': True}
|
||||
|
@ -280,7 +285,7 @@ def test_is_rl_exists(conn, update_data):
|
|||
assert resp.json() == {'err': 0, 'data': False}
|
||||
|
||||
# test on rl2
|
||||
payload = {'firstname': 'Homer', 'lastname': 'Simpson', 'dateBirth': '1956-05-12'}
|
||||
payload = {'firstname': 'Homer', 'lastname': 'Test_Simpson', 'dateBirth': '1956-05-12'}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'err': 0, 'data': True}
|
||||
|
@ -304,7 +309,7 @@ def test_create_rl2(conn, create_data, update_data):
|
|||
assert diff_rlg(conn, create_data['name_id'], 2, 'test_create_rl2.json')
|
||||
|
||||
|
||||
@pytest.mark.parametrize("rl", ['1', '2'])
|
||||
@pytest.mark.parametrize('rl', ['1', '2'])
|
||||
def test_update_rlg(conn, update_data, rl):
|
||||
rlg = 'rl' + rl
|
||||
RLG = 'RL' + rl
|
||||
|
@ -365,7 +370,7 @@ def test_update_rlg(conn, update_data, rl):
|
|||
in res['err_desc']
|
||||
)
|
||||
else:
|
||||
assert "La date de naissance ne peut pas être modifiée" in res['err_desc']
|
||||
assert 'La date de naissance ne peut pas être modifiée' in res['err_desc']
|
||||
|
||||
# restore RL1
|
||||
payload = copy.deepcopy(update_data['family_payload'][rlg])
|
||||
|
@ -454,7 +459,7 @@ def test_create_child(conn, create_data, update_data):
|
|||
assert 'E65 : Il existe déjà un enfant correspondant' in res['err_desc']
|
||||
|
||||
# child already exists error (Lisa form update_data)
|
||||
payload['lastname'] = 'Simpson'
|
||||
payload['lastname'] = 'Test_Simpson'
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
@ -613,6 +618,24 @@ def test_update_child_medical_record(conn, update_data):
|
|||
update_data['bart_num'],
|
||||
)
|
||||
|
||||
# update only doctor
|
||||
# #2720: allergies comments, and observations are erased
|
||||
payload = {
|
||||
'familyDoctor': {
|
||||
'name': 'Hibbert',
|
||||
'phone': '0656785678',
|
||||
'address': {
|
||||
'street1': 'General Hospital',
|
||||
'zipcode': '90701',
|
||||
'town': 'Springfield',
|
||||
},
|
||||
},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert diff_child(conn, update_data['name_id'], 0, 'test_update_child_doctor.json', key='medicalRecord')
|
||||
|
||||
# reset medical record
|
||||
payload = FAMILY_RESET_PAYLOAD['childList'][0]['medicalRecord']
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -776,21 +799,22 @@ def test_update_quotient(conn, create_data):
|
|||
'dateStart': '2022-01-01',
|
||||
'dateEnd': '2022-12-31',
|
||||
'mtt': '1500.33',
|
||||
'cdquo': '1',
|
||||
'cdquo': '2',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = read_family(conn, create_data['name_id'])
|
||||
assert data['RL1']['quotientList'] == [
|
||||
assert len(data['RL1']['quotientList']) == 2
|
||||
assert data['RL1']['quotients']['2'] == [
|
||||
{
|
||||
'yearRev': 2021,
|
||||
'dateStart': '2022-01-01T00:00:00+01:00',
|
||||
'dateEnd': '2022-12-31T00:00:00+01:00',
|
||||
'mtt': 1500.33,
|
||||
'cdquo': '1',
|
||||
'cdquo': '2',
|
||||
'codeUti': None,
|
||||
'cdquo_text': 'Revenus fiscaux',
|
||||
'cdquo_text': 'Revenus Petite enfance',
|
||||
}
|
||||
]
|
||||
|
||||
|
@ -800,7 +824,7 @@ def test_update_quotient(conn, create_data):
|
|||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = read_family(conn, create_data['name_id'])
|
||||
assert len(data['RL1']['quotientList']) == 2
|
||||
assert len(data['RL1']['quotients']['2']) == 2
|
||||
|
||||
# add quotient on another income year
|
||||
payload['yearRev'] = '2020'
|
||||
|
@ -808,7 +832,7 @@ def test_update_quotient(conn, create_data):
|
|||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = diff_rlg(conn, create_data['name_id'], 1, 'test_update_quotient.json', 'quotientList')
|
||||
assert len(data['RL1']['quotientList']) == 3
|
||||
assert len(data['RL1']['quotients']['2']) == 3
|
||||
|
||||
# test read-family with reference year
|
||||
url = conn + '/read-family?NameID=%s&income_year=%s' % (create_data['name_id'], '2020')
|
||||
|
@ -908,7 +932,7 @@ def test_read_family_members(conn, update_data):
|
|||
assert res['data']['personInfo']['firstname'] == 'ABRAHAM JEBEDIAH'
|
||||
|
||||
|
||||
def test_add_supplied_document(conn, create_data):
|
||||
def test_supplied_document(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
|
@ -916,6 +940,8 @@ def test_add_supplied_document(conn, create_data):
|
|||
payload = {
|
||||
'documentList/0/code': '46',
|
||||
'documentList/0/depositDate': '2022-12-20',
|
||||
'documentList/0/visaDate': '2022-12-21',
|
||||
'documentList/0/validityDate': '2022-12-22',
|
||||
'documentList/0/file': { # w.c.s. file field
|
||||
'filename': '201x201.jpg',
|
||||
'content_type': 'image/jpeg',
|
||||
|
@ -929,6 +955,7 @@ def test_add_supplied_document(conn, create_data):
|
|||
assert res['err'] == 0
|
||||
|
||||
# push on RL
|
||||
payload['documentList/0/code'] = '85'
|
||||
payload['numPerson'] = create_data['rl1_num']
|
||||
url = conn + '/add-supplied-document?NameID=%s' % create_data['name_id']
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -937,9 +964,43 @@ def test_add_supplied_document(conn, create_data):
|
|||
assert res['err'] == 0
|
||||
|
||||
# push on child
|
||||
payload['documentList/0/code'] = '69'
|
||||
payload['numPerson'] = create_data['bart_num']
|
||||
url = conn + '/add-supplied-document?NameID=%s' % create_data['name_id']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on family
|
||||
params = {
|
||||
'code': '46',
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
url = conn + '/read-supplied-document-validity?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on RL
|
||||
params = {
|
||||
'code': '85',
|
||||
'person_id': create_data['rl1_num'],
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on child
|
||||
params = {
|
||||
'code': '69',
|
||||
'person_id': create_data['bart_num'],
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
|
|
@ -1,174 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
def test_perisco(perisco_subscribe_info):
|
||||
assert perisco_subscribe_info['info']['activity']['libelle1'] == 'TEMPS DU MIDI 22/23'
|
||||
|
||||
|
||||
def test_perisco_agenda(conn, create_data, perisco_subscribe_info):
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# find first available booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
booking = None
|
||||
for booking in resp.json()['data']:
|
||||
if booking['disabled'] is False:
|
||||
break
|
||||
else:
|
||||
raise Exception("no booking available")
|
||||
assert booking['details']['activity_id'] == perisco_subscribe_info['activity']['id']
|
||||
assert booking['prefill'] is False
|
||||
|
||||
# book activity
|
||||
url = conn + '/update-child-agenda?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [booking['id']],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {
|
||||
'updated': True,
|
||||
'count': 1,
|
||||
'changes': [
|
||||
{
|
||||
'booked': True,
|
||||
'activity_id': booking['details']['activity_id'],
|
||||
'activity_label': 'Restauration scolaire',
|
||||
'day': booking['details']['day_str'],
|
||||
}
|
||||
],
|
||||
'err': 0,
|
||||
}
|
||||
|
||||
# check booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_recurrent_week(conn, create_data, perisco_subscribe_info, reference_year):
|
||||
# no subscribed activity
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 0
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['id'] == perisco_subscribe_info['activity']['id']
|
||||
|
||||
# get recurent-week gabarit
|
||||
url = conn + '/get-recurrent-week?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [(x['id'], x['day']) for x in resp.json()['data']] == [
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
]
|
||||
|
||||
# no booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['maggie_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert not any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# set recurent-week gabarit
|
||||
url = conn + '/update-recurrent-week?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
|
||||
# there is now some bookings
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['maggie_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
|
@ -0,0 +1,192 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
|
||||
def test_create_nursery_demand_on_existing_child(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/get-nursery-geojson'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
nurseries = resp.json()['features']
|
||||
assert len(nurseries) >= 2
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
nb_childs = len(res['data']['childList'])
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == []
|
||||
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], create_data['maggie_num'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == []
|
||||
|
||||
url = conn + '/create-nursery-demand'
|
||||
payload = {
|
||||
'family_id': create_data['family_id'],
|
||||
'family_indicators/0/code': 'APE_FIRSTC',
|
||||
'family_indicators/0/isActive': True,
|
||||
'child_id': create_data['maggie_num'],
|
||||
'demand_indicators/0/code': 'APE_COMPO1',
|
||||
'demand_indicators/0/isActive': True,
|
||||
'start_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
'number_of_days': '2',
|
||||
'start_hour_Mon': '08:00',
|
||||
'end_hour_Mon': '',
|
||||
'comment': 'bla',
|
||||
'accept_other_nurseries': True,
|
||||
'nursery1/idActivity': nurseries[0]['properties']['activity_id'],
|
||||
'nursery1/idUnit': nurseries[0]['properties']['unit_id'],
|
||||
'nursery1/idPlace': nurseries[0]['properties']['place_id'],
|
||||
'nursery2/idActivity': nurseries[1]['properties']['activity_id'],
|
||||
'nursery2/idUnit': nurseries[1]['properties']['unit_id'],
|
||||
'nursery2/idPlace': nurseries[1]['properties']['place_id'],
|
||||
'nursery3/idActivity': '',
|
||||
'nursery3/idUnit': '',
|
||||
'nursery3/idPlace': '',
|
||||
# indicators
|
||||
'child_indicators/0/code': 'APE_HBOTH',
|
||||
'child_indicators/0/isActive': True,
|
||||
'child_indicators/1/code': 'APE_HPAR',
|
||||
'child_indicators/1/isActive': True,
|
||||
'child_indicators/2/code': 'APE_COMPO3',
|
||||
'child_indicators/2/isActive': True,
|
||||
'child_indicators/3/code': 'APE_MULTIACC',
|
||||
'child_indicators/3/isActive': True,
|
||||
'family_indicators/0/code': 'APE_COMPO4',
|
||||
'family_indicators/0/isActive': True,
|
||||
'family_indicators/1/code': 'APE_NAIM',
|
||||
'family_indicators/1/isActive': True,
|
||||
'family_indicators/2/code': 'APE_FIRSTC',
|
||||
'family_indicators/2/isActive': True,
|
||||
'family_indicators/3/code': 'APE_COMPO2',
|
||||
'family_indicators/3/isActive': True,
|
||||
'family_indicators/4/code': 'APE_HAND',
|
||||
'family_indicators/4/isActive': True,
|
||||
'demand_indicators/0/code': 'APE_FRAT',
|
||||
'demand_indicators/0/isActive': True,
|
||||
'demand_indicators/1/code': 'APE_COMPO1',
|
||||
'demand_indicators/1/isActive': True,
|
||||
'demand_indicators/2/code': 'APE_HFRAT',
|
||||
'demand_indicators/2/isActive': True,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'data': None, 'err': 0}
|
||||
|
||||
# no child added
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert len(res['data']['childList']) == nb_childs
|
||||
|
||||
# check indicators
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO2',
|
||||
'APE_COMPO4',
|
||||
'APE_FIRSTC',
|
||||
'APE_HAND',
|
||||
'APE_NAIM',
|
||||
]
|
||||
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], create_data['maggie_num'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO3',
|
||||
'APE_HBOTH',
|
||||
'APE_HPAR',
|
||||
'APE_MULTIACC',
|
||||
]
|
||||
|
||||
|
||||
def test_create_nursery_demand_adding_new_child(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/get-nursery-geojson'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
nurseries = resp.json()['features']
|
||||
assert len(nurseries) >= 2
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
nb_childs = len(res['data']['childList'])
|
||||
assert 'NELSON' not in [x['firstname'] for x in res['data']['childList']]
|
||||
|
||||
url = conn + '/create-nursery-demand'
|
||||
payload = {
|
||||
'family_id': create_data['family_id'],
|
||||
'child_first_name': 'Nelson',
|
||||
'child_last_name': 'Muntz',
|
||||
'child_birthdate': '2013-10-31',
|
||||
'child_gender': 'G',
|
||||
'start_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
'nursery1/idActivity': nurseries[0]['properties']['activity_id'],
|
||||
'nursery1/idUnit': nurseries[0]['properties']['unit_id'],
|
||||
'nursery1/idPlace': nurseries[0]['properties']['place_id'],
|
||||
'nursery2/idActivity': nurseries[1]['properties']['activity_id'],
|
||||
'nursery2/idUnit': nurseries[1]['properties']['unit_id'],
|
||||
'nursery2/idPlace': nurseries[1]['properties']['place_id'],
|
||||
'nursery3/idActivity': '',
|
||||
'nursery3/idUnit': '',
|
||||
'nursery3/idPlace': '',
|
||||
# indicators
|
||||
'child_indicators/0/code': 'APE_HBOTH',
|
||||
'child_indicators/0/isActive': True,
|
||||
'child_indicators/1/code': 'APE_HPAR',
|
||||
'child_indicators/1/isActive': True,
|
||||
'child_indicators/2/code': 'APE_COMPO3',
|
||||
'child_indicators/2/isActive': True,
|
||||
'child_indicators/3/code': 'APE_MULTIACC',
|
||||
'child_indicators/3/isActive': True,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert res['err'] == 0
|
||||
child_id = resp.json()['data']
|
||||
assert child_id is not None
|
||||
|
||||
# a new child is created on family
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert len(res['data']['childList']) == nb_childs + 1
|
||||
assert 'NELSON' in [x['firstname'] for x in res['data']['childList']]
|
||||
assert res['data']['childList'][nb_childs]['num'] == child_id
|
||||
|
||||
# check child indicators
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], child_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert res['data']['firstname'] == 'NELSON'
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO3',
|
||||
'APE_HBOTH',
|
||||
'APE_HPAR',
|
||||
'APE_MULTIACC',
|
||||
]
|
|
@ -1,24 +0,0 @@
|
|||
import datetime
|
||||
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
# LOISIR is a subset of EXTRACO, we only test the genaral catalog cell here
|
||||
|
||||
|
||||
def test_catalog_general_loisirs(conn, update_data):
|
||||
unlink(conn, update_data['name_id'])
|
||||
link(conn, update_data)
|
||||
url = conn + '/read-activity-list'
|
||||
params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}
|
||||
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
[x['text'] for x in resp.json()['data']] == [
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS',
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 14h/16h30 - 10/15Ans, LA RAMEE',
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 15h30/17h - 8/15Ans, ARGOULETS',
|
||||
]
|
|
@ -0,0 +1,308 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def school_year(conn):
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
year = res['data'][0]['text']
|
||||
return year
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def exemption(conn):
|
||||
# get an exemption code
|
||||
url = conn + '/read-exemption-reasons-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
return res['data'][0]['id']
|
||||
|
||||
|
||||
def test_displaying_school_subscribed(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Read-family ramène les inscriptions aux date de visualisation paramétrées
|
||||
sur le référential YearSchool
|
||||
"""
|
||||
school_year = str(int(school_year) + 1)
|
||||
|
||||
# create a 7 year-old child
|
||||
url = conn + '/create-child?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'sexe': 'F',
|
||||
'firstname': 'Claris',
|
||||
'lastname': create_data['lastname'],
|
||||
'birth': {'dateBirth': '2016-09-12'},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
claris_id = str(resp.json()['data']['child_id'])
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration'
|
||||
payload = {
|
||||
'numPerson': claris_id,
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CE1',
|
||||
'dateSubscribe': school_year + '-01-01',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data']['returnMessage'] is None
|
||||
assert resp.json()['data']['subscribeSchoolBean']['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
assert resp.json()['data']['subscribeSchoolBean']['adresse'] == '101 GRANDE-RUE SAINT MICHEL'
|
||||
|
||||
# get Claris school from read-family
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()['data']
|
||||
date_start = [x['dateStartYearSchool'] for x in res if x['text'] == school_year][0]
|
||||
assert date_start[10] > datetime.datetime.now().strftime('%Y-%m-%d')
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
schools = [x['subscribeSchoolList'] for x in res['data']['childList'] if x['num'] == claris_id][0]
|
||||
assert len(schools) == 0 # school is filtered, but it is related to an hidden school year
|
||||
# field, not dateStartYearSchool, checked before : #2425
|
||||
|
||||
|
||||
def test_school_pre_registration_by_sector(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Pré-inscription de l'enfant de 7 ans dans son secteur
|
||||
"""
|
||||
# create a 7 year-old child
|
||||
url = conn + '/create-child?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'sexe': 'F',
|
||||
'firstname': 'Sego',
|
||||
'lastname': create_data['lastname'],
|
||||
'birth': {'dateBirth': '2016-05-09'},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
sego_id = str(resp.json()['data']['child_id'])
|
||||
|
||||
# assert there is a school at this address
|
||||
url = conn + '/read-schools-for-address-and-level'
|
||||
params = {
|
||||
'id_street': '2317',
|
||||
'num': '4',
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['text'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
|
||||
# assert there is a school at child address
|
||||
url = conn + '/read-schools-for-child-and-level'
|
||||
params = {
|
||||
'child_id': sego_id,
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['text'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
school_id = resp.json()['data'][0]['idSchool']
|
||||
assert school_id == '2435'
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration'
|
||||
payload = {
|
||||
'numPerson': sego_id,
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CE1',
|
||||
'dateSubscribe': school_year + '-01-01',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data']['returnMessage'] is None
|
||||
assert resp.json()['data']['subscribeSchoolBean']['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
assert resp.json()['data']['subscribeSchoolBean']['adresse'] == '101 GRANDE-RUE SAINT MICHEL'
|
||||
|
||||
# get Sego school from read-family
|
||||
url = conn + '/read-school-years-list'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()['data']
|
||||
date_start = [x['dateStartYearSchool'] for x in res if x['text'] == school_year][0]
|
||||
assert date_start[10] > datetime.datetime.now().strftime('%Y-%m-%d')
|
||||
# school is filtered, but it is related to an hidden school year
|
||||
# field, not dateStartYearSchool, see #2425
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
schools = [x['subscribeSchoolList'] for x in res['data']['childList'] if x['num'] == sego_id][0]
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
|
||||
|
||||
"""
|
||||
Pré-inscription d'un enfant de 5 ans en CP avec rappprochement de fratrie pour celui de 7 ans :
|
||||
rapprochement dans le secteur de l'enfant.
|
||||
"""
|
||||
# get Sego school
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': sego_id,
|
||||
'year': school_year,
|
||||
'level': 'CE1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolYearList']
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['subscribeSchool']['school']['idSchool'] == school_id
|
||||
assert schools[0]['subscribeSchool']['perim']['idPerim'] == '2707'
|
||||
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['maggie_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'GS',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': school_id,
|
||||
'numPersonSibling': sego_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'CALAS MATERNELLE'
|
||||
assert resp.json()['data']['adresse'] == '47 RUE ACHILLE VIADIEU' # same sector
|
||||
|
||||
|
||||
def test_school_pre_registration_by_exemption(conn, create_data, school_year, exemption):
|
||||
"""
|
||||
Pré-inscription de l'enfant de 9 ans en dérogation :
|
||||
c'est une dérogation avec sélection du motif sur un établissement hors secteur
|
||||
"""
|
||||
# school list
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'year': school_year,
|
||||
'level': 'CM1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolInformation']['derogSchoolList']
|
||||
assert len(schools) > 1
|
||||
school_id = schools[0]['id']
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration-with-exemption'
|
||||
payload = {
|
||||
'numPerson': create_data['bart_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CM1',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idRequestSchool1': school_id,
|
||||
'derogReasonCode': exemption,
|
||||
'derogComment': 'bla',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'AMIDONNIERS ELEMENTAIRE'
|
||||
assert resp.json()['data']['adresse'] == '123 ALL DE BRIENNE'
|
||||
|
||||
"""
|
||||
Pré-inscription de l'autre enfant de 5 ans en CP
|
||||
avec rapprochement de fratrie pour celui de 9 ans :
|
||||
rapprochement hors du secteur de l'enfant.
|
||||
"""
|
||||
|
||||
# check E124 error
|
||||
# get a school that do not provide a level in its sector
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['hugo_num'],
|
||||
'year': school_year,
|
||||
'level': 'GS',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert [
|
||||
x['idSchool']
|
||||
for x in data['childSubscribeSchoolInformation']['subscribeSchoolInformation']['derogSchoolList']
|
||||
if x['text'] == 'DIEUZAIDE JEAN MATERNELLE'
|
||||
] == ['2437']
|
||||
|
||||
# try to book on a sector that do not provide the requested level
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['hugo_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'CP',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': '2437',
|
||||
'numPersonSibling': create_data['bart_num'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 1
|
||||
assert resp.json()['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
assert 'E124' in resp.json()['err_desc']
|
||||
|
||||
# get Bart school
|
||||
url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'year': school_year,
|
||||
'level': 'CM1',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
schools = data['childSubscribeSchoolInformation']['subscribeSchoolYearList']
|
||||
assert len(schools) == 1
|
||||
assert schools[0]['subscribeSchool']['school']['idSchool'] == school_id
|
||||
assert schools[0]['subscribeSchool']['perim']['idPerim'] == '2663'
|
||||
|
||||
# book
|
||||
url = conn + '/create-child-school-pre-registration-with-sibling'
|
||||
payload = {
|
||||
'numPerson': create_data['hugo_num'],
|
||||
'schoolYear': school_year,
|
||||
'levelCode': 'GS',
|
||||
'datePresubscribe': school_year + '-01-01',
|
||||
'idSchoolRequested': school_id,
|
||||
'numPersonSibling': create_data['bart_num'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert 'returnMessage' not in resp.json()
|
||||
assert resp.json()['data']['schoolName'] == 'AMIDONNIERS MATERNELLE'
|
||||
assert resp.json()['data']['adresse'] == '125 ALL DE BRIENNE'
|
|
@ -0,0 +1,369 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
|
||||
def test_perisco(perisco_subscribe_info):
|
||||
assert perisco_subscribe_info['info']['activity']['libelle1'] == 'TEST TEMPS DU MIDI 22/23'
|
||||
|
||||
|
||||
def test_perisco_adulte(perisco_subscribe_adulte_info):
|
||||
assert perisco_subscribe_adulte_info['info']['activity']['libelle1'] == 'TEST RESTAURATION ADULTE 22/23'
|
||||
|
||||
|
||||
def test_perisco_agenda(conn, create_data, perisco_subscribe_info):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# find first available booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
booking = None
|
||||
for booking in resp.json()['data']:
|
||||
if booking['disabled'] is False:
|
||||
break
|
||||
else:
|
||||
raise Exception('no booking available')
|
||||
assert booking['details']['activity_id'] == perisco_subscribe_info['activity']['id']
|
||||
assert booking['details']['activity_label'] == 'Temps du midi'
|
||||
assert booking['prefill'] is False
|
||||
|
||||
# book activity
|
||||
url = conn + '/update-child-agenda?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [booking['id']],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {
|
||||
'updated': True,
|
||||
'count': 1,
|
||||
'changes': [
|
||||
{
|
||||
'booked': True,
|
||||
'activity_id': booking['details']['activity_id'],
|
||||
'activity_label': 'Temps du midi',
|
||||
'day': booking['details']['day_str'],
|
||||
}
|
||||
],
|
||||
'err': 0,
|
||||
}
|
||||
|
||||
# check booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_agenda_adulte(conn, create_data2, perisco_subscribe_adulte_info):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['rl1_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_adulte_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_adulte_info['place']['id'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# find first available booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
booking = None
|
||||
for booking in resp.json()['data']:
|
||||
if booking['disabled'] is False:
|
||||
break
|
||||
else:
|
||||
raise Exception('no booking available')
|
||||
assert booking['details']['activity_id'] == perisco_subscribe_adulte_info['activity']['id']
|
||||
assert booking['details']['activity_label'] == 'RESTAURATION ADULTE'
|
||||
assert booking['prefill'] is False
|
||||
|
||||
# book activity
|
||||
url = conn + '/update-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [booking['id']],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {
|
||||
'updated': True,
|
||||
'count': 1,
|
||||
'changes': [
|
||||
{
|
||||
'booked': True,
|
||||
'activity_id': booking['details']['activity_id'],
|
||||
'activity_label': 'RESTAURATION ADULTE',
|
||||
'day': booking['details']['day_str'],
|
||||
}
|
||||
],
|
||||
'err': 0,
|
||||
}
|
||||
|
||||
# check booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl1_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_recurrent_week(conn, create_data, perisco_subscribe_info, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
# no subscribed activity
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 0
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_info['place']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['id'] == perisco_subscribe_info['activity']['id']
|
||||
assert [(x['text'], x['libelle'], x['libelle2']) for x in resp.json()['data']] == [
|
||||
('Temps du midi', 'TEST TEMPS DU MIDI 22/23', 'Temps du midi'),
|
||||
]
|
||||
|
||||
# get recurent-week gabarit
|
||||
url = conn + '/get-recurrent-week?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [(x['id'], x['day']) for x in resp.json()['data']] == [
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
]
|
||||
|
||||
# no booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['maggie_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert not any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# set recurent-week gabarit
|
||||
url = conn + '/update-recurrent-week?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': perisco_subscribe_info['activity']['id'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
|
||||
# there is now some bookings
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['maggie_num'],
|
||||
'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
|
||||
def test_perisco_recurrent_week_adulte(conn, create_data2, perisco_subscribe_adulte_info, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
# no subscribed activity
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 0
|
||||
|
||||
# subscription
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'unit_id': perisco_subscribe_adulte_info['unit']['id'],
|
||||
'place_id': perisco_subscribe_adulte_info['place']['id'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'nature': 'PERISCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) == 1
|
||||
assert resp.json()['data'][0]['id'] == perisco_subscribe_adulte_info['activity']['id']
|
||||
assert [(x['text'], x['libelle'], x['libelle2']) for x in resp.json()['data']] == [
|
||||
('RESTAURATION ADULTE', 'TEST RESTAURATION ADULTE 22/23', 'RESTAURATION ADULTE')
|
||||
]
|
||||
|
||||
# get recurent-week gabarit
|
||||
url = conn + '/get-recurrent-week?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert [(x['id'], x['day']) for x in resp.json()['data']] == [
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('3-X', 'Mercredi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
]
|
||||
|
||||
# no booking
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl2_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert not any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# set recurent-week gabarit
|
||||
url = conn + '/update-recurrent-week?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['rl2_num'],
|
||||
'activity_id': perisco_subscribe_adulte_info['activity']['id'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
|
||||
# there is now some bookings
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data2['name_id']
|
||||
params = {
|
||||
'child_id': create_data2['rl2_num'],
|
||||
'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
|
||||
'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
|
@ -1,205 +0,0 @@
|
|||
import requests
|
||||
|
||||
|
||||
def test_basket_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 3
|
||||
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS ET PE'
|
||||
assert len(data[0]['lignes']) == 3
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 3
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# delete (generic) basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 6
|
||||
basket_id = data[0]['id']
|
||||
# idIns for the generic unit
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
if y['inscription']['idUnit'] == extrasco_subscribe_info['unit']['id']
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 3
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 3
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# re-subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# validate basket
|
||||
url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data['idInsLst']) == 6
|
||||
assert len(data['factureLst']) == 0 # No invoice #2187
|
||||
assert get_baskets() == []
|
||||
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# call cancelInvoiceAndDeleteSubscribeList de remove subscriptions
|
|
@ -0,0 +1,261 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import get_subscription_info, link, unlink
|
||||
|
||||
# LOISIR is like EXTRACO (most tests are redondants) but :
|
||||
# * there is no calendar (days) to provide.
|
||||
# * there is a general catalog to display
|
||||
|
||||
|
||||
def test_catalog_general_loisirs(conn, update_data):
|
||||
unlink(conn, update_data['name_id'])
|
||||
link(conn, update_data)
|
||||
url = conn + '/read-activity-list'
|
||||
params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}
|
||||
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
labels = [x['text'] for x in resp.json()['data']]
|
||||
assert (
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS'
|
||||
in labels
|
||||
)
|
||||
assert (
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 14h/16h30 - 10/15Ans, LA RAMEE'
|
||||
in labels
|
||||
)
|
||||
assert (
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 15h30/17h - 8/15Ans, ARGOULETS'
|
||||
in labels
|
||||
)
|
||||
assert 'Promenade forêt enchantée, TEST promenade forêt enchantée, TERRITOIRE OUEST' in labels
|
||||
assert 'Vitrail Fusing 1/2 Je Adultes, Inscription annuelle, Centre Culturel ALBAN MINVILLE' in labels
|
||||
|
||||
for item in resp.json()['data']:
|
||||
if (
|
||||
item['text']
|
||||
== 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS'
|
||||
):
|
||||
assert item['criterias'] == {
|
||||
'service': {'text': 'Service', 'data': {'sports': 'Sports'}, 'order': ['sports']},
|
||||
'nature': {
|
||||
'text': "Nature de l'activité",
|
||||
'data': {'1': 'Activités Régulières'},
|
||||
'order': ['1'],
|
||||
},
|
||||
'type': {
|
||||
'text': "Type de l'activité",
|
||||
'data': {'activites-aquatiques': 'Activités Aquatiques'},
|
||||
'order': ['activites-aquatiques'],
|
||||
},
|
||||
'public': {
|
||||
'text': 'Public',
|
||||
'data': {'1': 'Enfant (3-11 ans)', '2': 'Ado (12-17 ans)'},
|
||||
'order': ['1', '2'],
|
||||
},
|
||||
'day': {'text': 'Jours', 'data': {'3': 'Mercredi'}, 'order': ['3']},
|
||||
'place': {'text': 'Lieu', 'data': {'A10053179757': 'ARGOULETS'}, 'order': ['A10053179757']},
|
||||
}
|
||||
assert item['activity']['activityPortail']['blocNoteList'] == [
|
||||
{
|
||||
'note': "Activité ayant lieu le Mercredi, merci de choisir votre tranche horraire en fonction de l'âge de votre enfant.",
|
||||
'numIndex': 1,
|
||||
}
|
||||
]
|
||||
if item['text'] == 'Promenade forêt enchantée, TEST promenade forêt enchantée, TERRITOIRE OUEST':
|
||||
assert item['criterias'] == {
|
||||
'service': {'text': 'Service', 'data': {'sports': 'Sports'}, 'order': ['sports']},
|
||||
'nature': {
|
||||
'text': "Nature de l'activité",
|
||||
'data': {'1': 'Activités Régulières'},
|
||||
'order': ['1'],
|
||||
},
|
||||
'type': {
|
||||
'text': "Type de l'activité",
|
||||
'data': {'activite-pedestre': 'Activité Pédestre'},
|
||||
'order': ['activite-pedestre'],
|
||||
},
|
||||
'public': {'text': 'Public', 'data': {'5': 'Sénior (60 ans et plus)'}, 'order': ['5']},
|
||||
'day': {
|
||||
'text': 'Jours',
|
||||
'data': {'1': 'Lundi', '2': 'Mardi', '3': 'Mercredi', '4': 'Jeudi', '5': 'Vendredi'},
|
||||
'order': ['1', '2', '3', '4', '5'],
|
||||
},
|
||||
'place': {
|
||||
'text': 'Lieu',
|
||||
'data': {'A10056517597': 'TERRITOIRE OUEST'},
|
||||
'order': ['A10056517597'],
|
||||
},
|
||||
}
|
||||
assert item['activity']['activityPortail']['blocNoteList'] == [
|
||||
{'note': 'Activité de promenade en forêt.', 'numIndex': 1}
|
||||
]
|
||||
|
||||
|
||||
def test_catalog_personnalise_loisirs(loisirs_subscribe_info):
|
||||
assert (
|
||||
loisirs_subscribe_info['info']['activity']['libelle1']
|
||||
== 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES'
|
||||
)
|
||||
assert loisirs_subscribe_info['info']['calendarGeneration']['code'] == 'REQUIRED'
|
||||
assert [(x['id'], x['day']) for x in loisirs_subscribe_info['info']['recurrent_week']] == []
|
||||
assert loisirs_subscribe_info['info']['billingInformation'] == {
|
||||
'modeFact': 'FORFAIT',
|
||||
'quantity': 1.0,
|
||||
'unitPrice': 88.5,
|
||||
}
|
||||
|
||||
|
||||
def test_catalog_personnalise_loisirs_not_allowed(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
try:
|
||||
get_subscription_info(
|
||||
'LOISIRS',
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
|
||||
'MERCREDI - 15h30/17h - 8/15Ans',
|
||||
'ARGOULETS',
|
||||
conn,
|
||||
create_data['name_id'],
|
||||
create_data['rl1_num'],
|
||||
reference_year,
|
||||
)
|
||||
except Exception:
|
||||
return
|
||||
assert False, 'Adult can subscribe to child activity'
|
||||
|
||||
|
||||
def test_direct_subscribe(conn, create_data, loisirs_subscribe_info, reference_year):
|
||||
assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['hugo_num'],
|
||||
'activity_id': loisirs_subscribe_info['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info['place']['id'],
|
||||
'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# no idIns provided to remove subscription later
|
||||
assert resp.json()['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_direct_subscribe_out_town(conn, create_data2, loisirs_subscribe_info2, reference_year):
|
||||
assert loisirs_subscribe_info2['info']['controlResult']['controlOK'] is True
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['hugo_num'],
|
||||
'activity_id': loisirs_subscribe_info2['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info2['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info2['place']['id'],
|
||||
'start_date': loisirs_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info2['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# no idIns provided to remove subscription later
|
||||
assert resp.json()['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_subscribe_to_basket(conn, create_data, loisirs_subscribe_info, reference_year):
|
||||
assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': loisirs_subscribe_info['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info['place']['id'],
|
||||
'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
basket_id = resp.json()['data']['basket']['id']
|
||||
|
||||
# remove subscription
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_global_capacity(conn, create_data2, loisirs_subscribe_info3, reference_year):
|
||||
assert loisirs_subscribe_info3['info']['controlResult']['controlOK'] is True
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
# subscribe Bart
|
||||
url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
|
||||
# url = conn + '/add-person-basket-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['bart_num'],
|
||||
'activity_id': loisirs_subscribe_info3['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info3['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info3['place']['id'],
|
||||
'start_date': loisirs_subscribe_info3['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info3['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
# basket_id = resp.json()['data']['basket']['id']
|
||||
|
||||
# subscribe Lisa
|
||||
payload['person_id'] = create_data2['lisa_num']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# subscribe Maggie
|
||||
payload['person_id'] = create_data2['maggie_num']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
# can't subscribe Huggo
|
||||
payload['person_id'] = create_data2['hugo_num']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 1
|
||||
assert resp.json()['err_desc'] == ''
|
||||
|
||||
# check capacity on main catalog
|
||||
url = conn + '/read-activity-list'
|
||||
params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
for item in resp.json()['data']:
|
||||
if item['activity']['libelle'] == 'PUBLIK Vitrail Fusing 1/2 Je Adultes 2022/2023 - Mardi 14h-1':
|
||||
import pdb
|
||||
|
||||
pdb.set_trace()
|
||||
|
||||
# # remove subscriptions
|
||||
# url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
# payload = {'basket_id': basket_id}
|
||||
# resp = requests.post(url, json=payload)
|
||||
# resp.raise_for_status()
|
||||
# assert resp.json()['err'] == 0
|
|
@ -1,14 +1,47 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
def test_catalog_personnalise_extrasco(extrasco_subscribe_info):
|
||||
assert extrasco_subscribe_info['info']['activity']['libelle1'] == 'ADL ELEMENTAIRE Maourine Avril 2023'
|
||||
assert extrasco_subscribe_info['info']['calendarGeneration']['code'] == 'REQUIRED'
|
||||
assert (
|
||||
extrasco_subscribe_info['info']['activity']['libelle1']
|
||||
== 'PUBLIK ADL ELEMENTAIRE Maourine JUIN 22/23(NE PAS UTILISER)'
|
||||
)
|
||||
assert extrasco_subscribe_info['info']['calendarGeneration']['code'] == 'NOT_REQUIRED'
|
||||
assert extrasco_subscribe_info['info']['billingInformation'] == {
|
||||
'modeFact': 'PRESENCE',
|
||||
'quantity': None,
|
||||
'unitPrice': 43.0,
|
||||
'unitPrice': 11.5,
|
||||
}
|
||||
assert extrasco_subscribe_info['info']['activity']['blocNoteList'] == [
|
||||
{
|
||||
'note': 'Lien vers le réglement intérieur :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/03/23/flyer-sejour.pdf\r\nLien vers arrêté municipal :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/04/05/arrete-municipal.pdf',
|
||||
'numIndex': 1,
|
||||
}
|
||||
]
|
||||
assert (
|
||||
extrasco_subscribe_info['info']['agenda'][0]['details']['activity_label']
|
||||
== 'ADL ELEMENTAIRE Maourine Juin'
|
||||
)
|
||||
|
||||
|
||||
def test_catalog_personnalise_extrasco2(extrasco_subscribe_info2):
|
||||
assert (
|
||||
extrasco_subscribe_info2['info']['activity']['libelle1']
|
||||
== 'PUBLIK ADL MATERNELLE Lardenne JUIN 22/23 (NEPAS UTILISER)'
|
||||
)
|
||||
assert extrasco_subscribe_info2['info']['calendarGeneration']['code'] == 'FORBIDDEN'
|
||||
assert extrasco_subscribe_info2['info']['billingInformation'] == {
|
||||
'modeFact': 'PRESENCE',
|
||||
'quantity': None,
|
||||
'unitPrice': 11.5,
|
||||
}
|
||||
assert extrasco_subscribe_info2['info']['activity']['blocNoteList'] == [
|
||||
{
|
||||
'note': 'Lien vers le réglement intérieur :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/03/23/flyer-sejour.pdf\r\nLien vers arrêté municipal :\r\nhttps://portail-parsifal.test.entrouvert.org/media/uploads/2023/04/05/arrete-municipal.pdf',
|
||||
'numIndex': 1,
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def test_direct_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
|
@ -71,16 +104,11 @@ def test_subscribe_with_conveyance(conn, create_data, extrasco_subscribe_info):
|
|||
def test_subscribe_with_recurrent_week(conn, create_data, extrasco_subscribe_info):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
assert [(x['id'], x['day']) for x in extrasco_subscribe_info['info']['recurrent_week']] == [
|
||||
('1-C', 'Lundi'),
|
||||
('1-B', 'Lundi'),
|
||||
('2-C', 'Mardi'),
|
||||
('2-B', 'Mardi'),
|
||||
('3-C', 'Mercredi'),
|
||||
('3-B', 'Mercredi'),
|
||||
('4-C', 'Jeudi'),
|
||||
('4-B', 'Jeudi'),
|
||||
('5-C', 'Vendredi'),
|
||||
('5-B', 'Vendredi'),
|
||||
('1-X', 'Lundi'),
|
||||
('2-X', 'Mardi'),
|
||||
('3-X', 'Mercredi'),
|
||||
('4-X', 'Jeudi'),
|
||||
('5-X', 'Vendredi'),
|
||||
]
|
||||
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
|
@ -91,7 +119,7 @@ def test_subscribe_with_recurrent_week(conn, create_data, extrasco_subscribe_inf
|
|||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'recurrent_week': ['1-B', '2-C'],
|
||||
'recurrent_week': ['1-X', '2-X'],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -111,6 +139,16 @@ def test_subscribe_with_recurrent_week(conn, create_data, extrasco_subscribe_inf
|
|||
assert resp.json()['err'] == 0
|
||||
assert any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
# check quantity into basket
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
line = resp.json()['data'][0]['lignes'][0]
|
||||
assert line['prixUnit'] == 11.5
|
||||
assert line['qte'] > 0
|
||||
assert line['montant'] == line['prixUnit'] * line['qte']
|
||||
|
||||
# remove subscription
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
|
@ -135,7 +173,23 @@ def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
|||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# subscribe witout providing calandar
|
||||
def get_perisco_bookings():
|
||||
url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'child_id': create_data['bart_num'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return [
|
||||
item
|
||||
for item in resp.json()['data']
|
||||
if item['details']['activity_id'] == extrasco_subscribe_info['activity']['id']
|
||||
]
|
||||
|
||||
# subscribe without providing calendar
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
|
@ -152,6 +206,7 @@ def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
|||
|
||||
# no booking
|
||||
assert not any(x['prefill'] for x in get_bookings())
|
||||
assert not any(x['prefill'] for x in get_perisco_bookings())
|
||||
|
||||
# book using info calendar gabarit (booking registered from w.c.s. form)
|
||||
assert len(extrasco_subscribe_info['info']['agenda']) > 0
|
||||
|
@ -173,6 +228,17 @@ def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
|||
|
||||
# there is now 2 bookings
|
||||
assert len([x['prefill'] for x in get_bookings() if x['prefill'] is True]) == 2
|
||||
perisco_bookings = get_perisco_bookings()
|
||||
assert len([x['prefill'] for x in perisco_bookings if x['prefill'] is True]) == 2
|
||||
assert perisco_bookings[0]['details']['activity_label'] == 'ADL ELEMENTAIRE Maourine Juin'
|
||||
|
||||
# check quantity into basket
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
line = resp.json()['data'][0]['lignes'][0]
|
||||
assert (line['prixUnit'], line['qte'], line['montant']) == (11.5, 0.0, 0.0)
|
||||
|
||||
# unbook slots
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
|
@ -196,3 +262,61 @@ def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
|
|||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_daily_capacity(conn, create_data2, extrasco_subscribe_info3):
|
||||
assert extrasco_subscribe_info3['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def subscribe(child):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['%s_num' % child],
|
||||
'activity_id': extrasco_subscribe_info3['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info3['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info3['place']['id'],
|
||||
'start_date': extrasco_subscribe_info3['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info3['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']['basket']['id']
|
||||
|
||||
def book(child, slot):
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data2['name_id']
|
||||
payload = {
|
||||
'person_id': create_data2['%s_num' % child],
|
||||
'activity_id': extrasco_subscribe_info3['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info3['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info3['unit']['dateEnd'][:10],
|
||||
'booking_list': [slot],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
# subscribe all family childs
|
||||
basket_id = subscribe('bart')
|
||||
for child in 'lisa', 'maggie', 'hugo':
|
||||
assert subscribe(child) == basket_id
|
||||
|
||||
# book all childs on the same day
|
||||
assert len(extrasco_subscribe_info3['info']['agenda']) > 0
|
||||
assert not any(x['prefill'] for x in extrasco_subscribe_info3['info']['agenda'])
|
||||
slots = [x['id'] for x in extrasco_subscribe_info3['info']['agenda'] if x['disabled'] is False]
|
||||
for child in 'bart', 'lisa', 'maggie':
|
||||
resp = book(child, slots[-1])
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True]
|
||||
resp = book('hugo', slots[-1])
|
||||
assert resp.json()['err'] == 1
|
||||
assert resp.json()['err_desc'] == 0
|
||||
|
||||
# # remove subscriptions
|
||||
# url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
# payload = {'basket_id': basket_id}
|
||||
# resp = requests.post(url, json=payload)
|
||||
# resp.raise_for_status()
|
||||
# assert resp.json()['err'] == 0
|
|
@ -1,109 +0,0 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import diff, link, unlink
|
||||
|
||||
|
||||
def test_direct_debit_order(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'codeRegie': '102',
|
||||
'bank/bankBIC': 'BDFEFR2T',
|
||||
'bank/bankIBAN': 'FR7630001007941234567890185',
|
||||
'bank/bankRUM': 'xxx',
|
||||
'bank/dateStart': '2023-01-01',
|
||||
'bank/bankAddress': '75049 PARIS cedex 01',
|
||||
'bank/civility': 'x',
|
||||
'bank/lastName': 'Ewing',
|
||||
'bank/firstName': 'John Ross',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
url = conn + '/get-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'codeRegie': '102',
|
||||
'dateRef': '2023-01-01',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
res['data']['numPerson'] = 'N/A'
|
||||
assert diff(res['data'], 'test_get_rl1_direct_debit_order.json')
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_basket_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 3
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS ET PE'
|
||||
assert len(data[0]['lignes']) == 3
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 3
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# validate basket
|
||||
url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data['idInsLst']) == 6
|
||||
assert len(data['factureLst']) == 0
|
||||
assert get_baskets() == []
|
||||
|
||||
# to continue :
|
||||
# cancelInvoiceAndDeleteSubscribeList
|
||||
# payInvoice
|
|
@ -0,0 +1,557 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
def test_basket_subscribe_extrasco(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1 # 3 sur Larden
|
||||
|
||||
subs = subscriptions(create_data['bart_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 1
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS'
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1 # 3 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
subs = subscriptions(create_data['maggie_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 1
|
||||
|
||||
# delete (generic) basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 2 # 6 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
# line for the generic unit for Bart
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
if y['inscription']['idUnit'] == extrasco_subscribe_info['unit']['id']
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 1 # 3 sur Larden
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# re-subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_basket_subscribe_extrasco2(conn, create_data, extrasco_subscribe_info2, reference_year):
|
||||
"""Subscribing to a generic unit"""
|
||||
assert extrasco_subscribe_info2['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info2['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info2['place']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1 # 3 expected
|
||||
|
||||
subs = subscriptions(create_data['bart_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 2
|
||||
assert [x['libelle'] for x in subs[0]['subscribesUnit']] == [
|
||||
'PUBLIK ADL MATERNELLE Lardenne JUIN 22/23 (NEPAS UTILISER)',
|
||||
'PUBLIK ADL MATER JOURNEE AVEC REPAS',
|
||||
]
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS'
|
||||
assert len(data[0]['lignes']) == 1 # 3 expected
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# we should get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1 # 3 expected
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# delete (generic) basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 2 # 6 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
# line for the generic unit for Bart
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
if y['inscription']['idUnit'] == extrasco_subscribe_info2['unit']['id']
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 1 # 3 sur Larden
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# re-subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info2['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# delete basket
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
|
||||
def test_basket_subscribe_loisirs(conn, create_data, loisirs_subscribe_info, reference_year):
    """End-to-end basket workflow for a LOISIRS (leisure) activity.

    Subscribes Bart then Maggie to the same leisure activity via the basket
    API, checking at each step the basket content and the per-person
    subscription listings; then deletes Bart's basket line, and finally the
    whole basket, verifying the pending subscriptions are cancelled with it.

    Fix vs previous version: removed a duplicated
    ``assert len(get_baskets()) == 1`` that re-issued the same HTTP request
    right after asserting the identical condition on already-fetched data.
    """
    assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # Return the current baskets of the linked family.
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # Add a basket subscription for person_id on the fixture activity.
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': loisirs_subscribe_info['activity']['id'],
            'unit_id': loisirs_subscribe_info['unit']['id'],
            'place_id': loisirs_subscribe_info['place']['id'],
            'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
            'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    def subscriptions(person_id):
        # LOISIRS subscriptions of person_id, restricted to the test activity.
        url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'nature': 'LOISIRS',
            'school_year': '%s-%s' % (reference_year, reference_year + 1),
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return [
            x
            for x in resp.json()['data']
            if x['libelle'] == 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES'
        ]

    # no subscription
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 109
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 1
    subs = subscriptions(create_data['bart_num'])
    assert len(subs) == 1
    assert len(subs[0]['subscribesUnit']) == 2
    assert [x['libelle'] for x in subs[0]['subscribesUnit']] == [
        'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
        'MERCREDI - 15h30/17h - 8/15Ans',
    ]
    assert subscriptions(create_data['maggie_num']) == []

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 109
    assert data[0]['text'] == 'SPORT'
    assert len(data[0]['lignes']) == 1
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
    assert len({x['idIns'] for x in data[0]['lignes']}) == 1
    assert data[0]['lignes'][0]['montant'] == 88.5
    basket_id = data[0]['id']

    # cannot subscribe Bart twice
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 1
    assert 'E1019' in resp.json()['err_desc']
    assert len(get_baskets()) == 1

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    subs = subscriptions(create_data['maggie_num'])
    assert len(subs) == 1
    assert len(subs[0]['subscribesUnit']) == 2

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['id'] == basket_id
    assert data[0]['codeRegie'] == 109
    assert data[0]['text'] == 'SPORT'
    assert len(data[0]['lignes']) == 2
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 2
    assert len({x['idIns'] for x in data[0]['lignes']}) == 2
    assert all(x['montant'] == 88.5 for x in data[0]['lignes'])

    # delete basket line for Bart
    data = get_baskets()
    assert len(data) == 1
    assert len(data[0]['lignes']) == 2
    basket_id = data[0]['id']
    # line for Bart
    line_id = [
        y['id']
        for x in data
        for y in x['lignes']
        if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
    ][0]
    url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
    payload = {
        'basket_id': basket_id,
        'line_id': line_id,
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['codeRegie'] == 109
    assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
    assert len({x['idIns'] for x in data['lignes']}) == 1
    data = get_baskets()
    assert len(data) == 1
    assert len(data[0]['lignes']) == 1
    assert subscriptions(create_data['bart_num']) == []
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # delete basket
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'
    assert get_baskets() == []
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []
|
|
@ -0,0 +1,346 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import diff, link, unlink
|
||||
|
||||
|
||||
def test_direct_debit_order(conn, create_data):
    """Create an RL1 direct debit order and read it back.

    Re-links the family first (unlink/link) so the test starts from a known
    association state, posts a direct debit order for regie 102, then fetches
    it and compares against a stored JSON snapshot.

    Fix vs previous version: assert ``err == 0`` on both responses before
    inspecting ``data``, consistent with every other test in this file.
    """
    unlink(conn, create_data['name_id'])
    link(conn, create_data)

    url = conn + '/add-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
    payload = {
        'codeRegie': '102',
        'bank/bankBIC': 'BDFEFR2T',
        'bank/bankIBAN': 'FR7630001007941234567890185',
        'bank/bankRUM': 'xxx',
        'bank/dateStart': '2023-01-01',
        'bank/bankAddress': '75049 PARIS cedex 01',
        'bank/civility': 'x',
        'bank/lastName': 'Ewing',
        'bank/firstName': 'John Ross',
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    res = resp.json()
    assert res['err'] == 0
    assert res['data'] == 'ok'

    url = conn + '/get-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
    params = {
        'codeRegie': '102',
        'dateRef': '2023-01-01',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    res = resp.json()
    assert res['err'] == 0
    # numPerson varies between runs; neutralize it before snapshot comparison
    res['data']['numPerson'] = 'N/A'
    assert diff(res['data'], 'test_get_rl1_direct_debit_order.json')
|
||||
|
||||
|
||||
def test_pay_invoice_loisirs(conn, create_data, loisirs_subscribe_info, reference_year):
    """Basket validation and invoice payment for a LOISIRS activity.

    Subscribes Bart and Maggie through the basket, validates the basket
    (which generates an invoice on regie 109), pays the invoice, then
    verifies it moves from the "to be paid" list to the payment history.
    """
    assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # Return the current baskets of the linked family.
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # Add a basket subscription for person_id on the fixture activity.
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': loisirs_subscribe_info['activity']['id'],
            'unit_id': loisirs_subscribe_info['unit']['id'],
            'place_id': loisirs_subscribe_info['place']['id'],
            'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
            'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 109
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 1

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 109
    assert data[0]['text'] == 'SPORT'
    assert len(data[0]['lignes']) == 2
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 2
    assert len({x['idIns'] for x in data[0]['lignes']}) == 2
    basket_id = data[0]['id']

    # validate basket to generate an invoice
    url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data['idInsLst']) == 2
    assert len(data['factureLst']) == 1
    assert len(data['factureLst'][0]['lineInvoiceList']) == 2
    assert data['factureLst'][0]['regie']['code'] == 109
    invoice_num = data['factureLst'][0]['numInvoice']
    invoice_id = data['factureLst'][0]['idInvoice']
    assert get_baskets() == []

    # get invoices paid
    url = conn + '/regie/109/invoices/history?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json() == {'data': [], 'err': 0}

    # get invoices to be paid
    url = conn + '/regie/109/invoices?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data) == 1
    assert data[0]['amount'] == '177'  # or should this just check > 0?
    assert data[0]['online_payment'] is True
    assert data[0]['paid'] is False
    assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
    assert data[0]['maelis_item']['idInvoice'] == invoice_id
    assert data[0]['maelis_item']['numInvoice'] == invoice_num

    # payInvoice
    url = conn + '/regie/109/invoice/%s-%s/pay/' % (create_data['family_id'], invoice_num)
    payload = {
        'transaction_date': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
        'transaction_id': 'xxx',
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    res = resp.json()
    assert res['data'] == 'ok'

    # get invoices to be paid
    url = conn + '/regie/109/invoices?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json() == {'has_invoice_for_payment': True, 'data': [], 'err': 0}

    # get invoices paid
    url = conn + '/regie/109/invoices/history?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data) == 1
    assert data[0]['amount'] == '0'
    assert data[0]['total_amount'] == '177'  # or should this just check > 0?
    assert data[0]['online_payment'] is False
    assert data[0]['paid'] is True
    assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
    assert data[0]['maelis_item']['idInvoice'] == invoice_id
    assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
||||
|
||||
|
||||
def test_payinvoice_extrasco(conn, create_data, extrasco_subscribe_info, reference_year):
    """Basket validation and invoice payment for an EXTRASCO activity.

    Same flow as the LOISIRS payment test, but on regie 105 and with agenda
    bookings added for both children before the basket is validated.
    """
    assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # Return the current baskets of the linked family.
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # Add a basket subscription for person_id on the fixture activity.
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'unit_id': extrasco_subscribe_info['unit']['id'],
            'place_id': extrasco_subscribe_info['place']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    def subscriptions(person_id):
        # EXTRASCO subscriptions of person_id for the reference school year.
        url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'nature': 'EXTRASCO',
            'school_year': '%s-%s' % (reference_year, reference_year + 1),
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def get_bookings(person_id):
        # Agenda slots of person_id over the activity's date range.
        url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    # no subscription
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 105
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 1

    assert len(subscriptions(create_data['bart_num'])) == 1
    assert subscriptions(create_data['maggie_num']) == []

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 105
    assert len(data[0]['lignes']) == 1
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1

    assert len({x['idIns'] for x in data[0]['lignes']}) == 1
    basket_id = data[0]['id']

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # add bookings to Bart
    slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) > 0

    # add bookings to Maggie
    # slot ids embed the person number as first ':'-separated field;
    # rewrite Bart's slot ids so they reference Maggie instead
    slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['maggie_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) > 0

    # validate basket
    url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data['idInsLst']) == 2
    assert len(data['factureLst']) == 1
    assert get_baskets() == []
    assert len(data['factureLst'][0]['lineInvoiceList']) == 2
    assert data['factureLst'][0]['regie']['code'] == 105
    invoice_num = data['factureLst'][0]['numInvoice']
    invoice_id = data['factureLst'][0]['idInvoice']

    # get invoices paid
    url = conn + '/regie/105/invoices/history?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json() == {'data': [], 'err': 0}

    # get invoices to be paid
    url = conn + '/regie/105/invoices?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data) == 1
    assert int(data[0]['amount']) > 0
    assert data[0]['online_payment'] is True
    assert data[0]['paid'] is False
    assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
    assert data[0]['maelis_item']['idInvoice'] == invoice_id
    assert data[0]['maelis_item']['numInvoice'] == invoice_num

    # payInvoice
    url = conn + '/regie/105/invoice/%s-%s/pay/' % (create_data['family_id'], invoice_num)
    payload = {
        'transaction_date': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
        'transaction_id': 'xxx',
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    res = resp.json()
    assert res['data'] == 'ok'

    # get invoices to be paid
    url = conn + '/regie/105/invoices?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json() == {'has_invoice_for_payment': True, 'data': [], 'err': 0}

    # get invoices history
    url = conn + '/regie/105/invoices/history?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert len(data) == 1
    assert data[0]['amount'] == '0'
    assert int(data[0]['total_amount']) > 0
    assert data[0]['online_payment'] is False
    assert data[0]['paid'] is True
    assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
    assert data[0]['maelis_item']['idInvoice'] == invoice_id
    assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
|
@ -2,9 +2,9 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--url", help="Url of a passerelle Vivaticket connector instance")
|
||||
parser.addoption('--url', help='Url of a passerelle Vivaticket connector instance')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption("--url")
|
||||
return request.config.getoption('--url')
|
||||
|
|
|
@ -6,7 +6,7 @@ import requests
|
|||
|
||||
|
||||
def call_generic(conn, endpoint):
|
||||
print("%s \n" % endpoint)
|
||||
print('%s \n' % endpoint)
|
||||
url = conn + '/%s' % endpoint
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -50,7 +50,7 @@ def test_book_event(conn):
|
|||
themes = call_generic(conn, 'themes')
|
||||
random.shuffle(themes)
|
||||
payload['theme'] = themes[0]['id']
|
||||
print("Creating booking with the following payload:\n%s" % payload)
|
||||
print('Creating booking with the following payload:\n%s' % payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
|
|
@ -1,4 +0,0 @@
|
|||
#!/bin/sh -ue
|
||||
|
||||
test -d wcs || git clone https://git.entrouvert.org/entrouvert/wcs.git
|
||||
(cd wcs && git pull)
|
|
@ -2,8 +2,8 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == "__main__":
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passerelle.settings")
|
||||
if __name__ == '__main__':
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'passerelle.settings')
|
||||
|
||||
from django.core.management import execute_from_command_line
|
||||
|
||||
|
|
|
@ -102,6 +102,7 @@ class AddressResource(BaseResource):
|
|||
@endpoint(
|
||||
name='sectors',
|
||||
description=_('List related Sectorizations'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Sector Identifier (slug)')},
|
||||
'q': {'description': _('Filter by Sector Title or Identifier')},
|
||||
|
|
|
@ -48,7 +48,7 @@ class ActesWeb(BaseResource):
|
|||
def basepath(self):
|
||||
return os.path.join(default_storage.path('actesweb'), self.slug)
|
||||
|
||||
@endpoint(perm='can_access', methods=['post'], description=_('Create demand'))
|
||||
@endpoint(methods=['post'], description=_('Create demand'))
|
||||
def create(self, request, *args, **kwargs):
|
||||
try:
|
||||
payload = json.loads(request.body)
|
||||
|
|
|
@ -0,0 +1,77 @@
|
|||
# Generated by Django 3.2.18 on 2023-07-07 10:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Initial migration: create the AdullactPastell connector model.

    Auto-generated by Django; the fields mirror the model declaration
    (BaseResource/HTTPResource fields plus api_base_url and token).
    Do not edit field definitions by hand.
    """

    initial = True

    dependencies = [
        ('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
    ]

    operations = [
        migrations.CreateModel(
            name='AdullactPastell',
            fields=[
                (
                    'id',
                    models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
                ),
                ('title', models.CharField(max_length=50, verbose_name='Title')),
                ('slug', models.SlugField(unique=True, verbose_name='Identifier')),
                ('description', models.TextField(verbose_name='Description')),
                (
                    'basic_auth_username',
                    models.CharField(
                        blank=True, max_length=128, verbose_name='Basic authentication username'
                    ),
                ),
                (
                    'basic_auth_password',
                    models.CharField(
                        blank=True, max_length=128, verbose_name='Basic authentication password'
                    ),
                ),
                (
                    'client_certificate',
                    models.FileField(
                        blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
                    ),
                ),
                (
                    'trusted_certificate_authorities',
                    models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'),
                ),
                (
                    'verify_cert',
                    models.BooleanField(blank=True, default=True, verbose_name='TLS verify certificates'),
                ),
                (
                    'http_proxy',
                    models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
                ),
                (
                    'api_base_url',
                    models.URLField(
                        help_text='Example: https://pastell.example.com/api/v2/',
                        max_length=128,
                        verbose_name='API base URL',
                    ),
                ),
                ('token', models.CharField(blank=True, max_length=128, verbose_name='API token')),
                (
                    'users',
                    models.ManyToManyField(
                        blank=True,
                        related_name='_adullact_pastell_adullactpastell_users_+',
                        related_query_name='+',
                        to='base.ApiUser',
                    ),
                ),
            ],
            options={
                'verbose_name': 'Adullact Pastell',
            },
        ),
    ]
|
|
@ -0,0 +1,265 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import base64
|
||||
from urllib import parse as urlparse
|
||||
|
||||
import requests
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from django.http import HttpResponse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource, HTTPResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
# JSON sub-schema describing an uploaded file: base64 content plus filename,
# with an optional content type. Reused by both endpoint schemas below.
FILE_OBJECT_PROPERTIES = {
    'title': _('File object'),
    'type': 'object',
    'properties': {
        'filename': {
            'type': 'string',
            'description': _('Filename'),
        },
        'content': {
            'type': 'string',
            'description': _('Content'),
        },
        'content_type': {
            'type': 'string',
            'description': _('Content type'),
        },
    },
    'required': ['filename', 'content'],
}


# Request-body schema for the create-document endpoint. additionalProperties
# is True: extra keys are forwarded to Pastell as document attributes.
DOCUMENT_CREATION_SCHEMA = {
    '$schema': 'http://json-schema.org/draft-04/schema#',
    'type': 'object',
    'required': ['type'],
    'additionalProperties': True,
    'properties': {
        'type': {'type': 'string', 'description': _('Document type')},
        'file_field_name': {
            'type': 'string',
            'description': _('Document file\'s field name'),
        },
        'file': FILE_OBJECT_PROPERTIES,
        'filename': {
            'type': 'string',
            'description': _('Filename (takes precedence over filename in "file" object)'),
        },
    },
}

# Request-body schema for the upload-document-file endpoint; here the file
# and its target field name are mandatory and no extra keys are accepted.
DOCUMENT_FILE_UPLOAD_SCHEMA = {
    '$schema': 'http://json-schema.org/draft-04/schema#',
    'type': 'object',
    'required': ['file', 'file_field_name'],
    'additionalProperties': False,
    'properties': {
        'filename': {
            'type': 'string',
            'description': _('Filename (takes precedence over filename in "file" object)'),
        },
        'file': FILE_OBJECT_PROPERTIES,
        'file_field_name': {
            'type': 'string',
            'description': _('Document file\'s field name'),
        },
    },
}
|
||||
|
||||
|
||||
class AdullactPastell(BaseResource, HTTPResource):
|
||||
api_base_url = models.URLField(
|
||||
max_length=128,
|
||||
verbose_name=_('API base URL'),
|
||||
help_text=_('Example: https://pastell.example.com/api/v2/'),
|
||||
)
|
||||
token = models.CharField(max_length=128, blank=True, verbose_name=_('API token'))
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Adullact Pastell')
|
||||
|
||||
def clean(self, *args, **kwargs):
|
||||
if not self.token and not self.basic_auth_username:
|
||||
raise ValidationError(_('API token or authentication username and password should be defined.'))
|
||||
return super().clean(*args, **kwargs)
|
||||
|
||||
def call(self, path, method='get', params=None, **kwargs):
|
||||
url = urlparse.urljoin(self.api_base_url, path)
|
||||
if self.token:
|
||||
kwargs.update({'headers': {'Authorization': 'Bearer: %s' % self.token}, 'auth': None})
|
||||
try:
|
||||
response = self.requests.request(url=url, method=method, params=params, **kwargs)
|
||||
response.raise_for_status()
|
||||
except (requests.Timeout, requests.RequestException) as e:
|
||||
raise APIError(str(e))
|
||||
return response
|
||||
|
||||
def check_status(self):
|
||||
try:
|
||||
response = self.call('version')
|
||||
except APIError as e:
|
||||
raise Exception('Pastell server is down: %s' % e)
|
||||
return {'data': response.json()}
|
||||
|
||||
def upload_file(self, entity_id, document_id, file_field_name, data, **kwargs):
|
||||
filename = kwargs.get('filename') or data['filename']
|
||||
file_data = {
|
||||
'file_content': (
|
||||
filename,
|
||||
base64.b64decode(data['content']),
|
||||
data.get('content_type'),
|
||||
)
|
||||
}
|
||||
|
||||
return self.call(
|
||||
'entite/%s/document/%s/file/%s' % (entity_id, document_id, file_field_name),
|
||||
'post',
|
||||
files=file_data,
|
||||
data={'file_name': filename},
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
description=_('List entities'),
|
||||
datasource=True,
|
||||
)
|
||||
def entities(self, request):
|
||||
data = []
|
||||
response = self.call('entite')
|
||||
for item in response.json():
|
||||
item['id'] = item['id_e']
|
||||
item['text'] = item['denomination']
|
||||
data.append(item)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
description=_('List entity documents'),
|
||||
parameters={'entity_id': {'description': _('Entity ID'), 'example_value': '42'}},
|
||||
datasource=True,
|
||||
)
|
||||
def documents(self, request, entity_id):
|
||||
if request.GET.get('id'):
|
||||
response = self.call('entite/%s/document/%s' % (entity_id, request.GET['id']))
|
||||
return {'data': response.json()}
|
||||
|
||||
data = []
|
||||
response = self.call('entite/%s/document' % entity_id)
|
||||
for item in response.json():
|
||||
item['id'] = item['id_d']
|
||||
item['text'] = item['titre']
|
||||
data.append(item)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Create a document for an entity'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_CREATION_SCHEMA}},
|
||||
},
|
||||
name='create-document',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
},
|
||||
)
|
||||
def create_document(self, request, entity_id, post_data):
|
||||
file_data = post_data.pop('file', None)
|
||||
file_field_name = post_data.pop('file_field_name', None)
|
||||
|
||||
# create document
|
||||
response = self.call('entite/%s/document' % entity_id, 'post', params=post_data)
|
||||
document_id = response.json()['id_d']
|
||||
|
||||
# update it with other attributes
|
||||
response = self.call('entite/%s/document/%s' % (entity_id, document_id), 'patch', params=post_data)
|
||||
|
||||
# upload file if it's filled
|
||||
if file_field_name and file_data:
|
||||
self.upload_file(entity_id, document_id, file_field_name, file_data, **post_data)
|
||||
|
||||
return {'data': response.json()}
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Upload a file to a document'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_FILE_UPLOAD_SCHEMA}},
|
||||
},
|
||||
name='upload-document-file',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
},
|
||||
)
|
||||
def upload_document_file(self, request, entity_id, document_id, post_data):
|
||||
file_field_name = post_data.pop('file_field_name')
|
||||
file_data = post_data.pop('file')
|
||||
response = self.upload_file(entity_id, document_id, file_field_name, file_data, **post_data)
|
||||
return {'data': response.json()}
|
||||
|
||||
@endpoint(
    description=_('Get document\'s file'),
    name='get-document-file',
    parameters={
        'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
        'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
        'field_name': {
            'description': _('Document file\'s field name'),
            'example_value': 'document',
        },
    },
)
def get_document_file(self, request, entity_id, document_id, field_name):
    """Stream a document's file back to the caller, preserving the
    upstream content type and disposition headers."""
    upstream = self.call('entite/%s/document/%s/file/%s' % (entity_id, document_id, field_name))
    http_response = HttpResponse(upstream.content, content_type=upstream.headers['Content-Type'])
    # NOTE: the upstream service uses a lowercase 'd' in this header name.
    http_response['Content-Disposition'] = upstream.headers['Content-disposition']
    return http_response
|
||||
|
||||
@endpoint(
    post={
        'description': _('Run action on document'),
        'request_body': {
            'schema': {
                'application/json': {
                    '$schema': 'http://json-schema.org/draft-04/schema#',
                    'type': 'object',
                    'required': ['action_name'],
                    'additionalProperties': False,
                    'properties': {
                        'action_name': {'type': 'string', 'description': _('Action name')},
                    },
                }
            }
        },
    },
    name='run-document-action',
    parameters={
        'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
        'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
    },
)
def run_document_action(self, request, entity_id, document_id, post_data):
    """Trigger a named workflow action on a document and return the raw answer."""
    action = post_data['action_name']
    action_response = self.call(
        'entite/%s/document/%s/action/%s' % (entity_id, document_id, action), 'post'
    )
    return {'data': action_response.json()}
|
|
@ -44,6 +44,7 @@ class AirQuality(BaseResource):
|
|||
@endpoint(
|
||||
pattern=r'^(?P<country>\w+)/(?P<city>\w+)/$',
|
||||
example_pattern='{country}/{city}/',
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'country': {'description': _('Country Code'), 'example_value': 'fr'},
|
||||
'city': {'description': _('City Name'), 'example_value': 'lyon'},
|
||||
|
|
|
@ -185,7 +185,6 @@ class APIEntreprise(BaseResource):
|
|||
METHOD_PARAM = {'description': _('method used for user identity matching'), 'example_value': 'simple'}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<association_id>\w+)/$',
|
||||
example_pattern='{association_id}/',
|
||||
description=_('Get association\'s documents'),
|
||||
|
@ -289,7 +288,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': document}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s data from Infogreffe'),
|
||||
|
@ -305,7 +303,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': raw_data['data']}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<association_id>\w+)/$',
|
||||
example_pattern='{association_id}/',
|
||||
description=_('Get association\'s related informations'),
|
||||
|
@ -324,7 +321,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s related informations'),
|
||||
|
@ -385,7 +381,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': {'entreprise': data, 'etablissement_siege': siege_data}}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
methods=['get'],
|
||||
pattern=r'(?P<siret>\w+)/$',
|
||||
example_pattern='{siret}/',
|
||||
|
@ -420,7 +415,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
methods=['get'],
|
||||
pattern=r'(?P<siret>\w+)/$',
|
||||
example_pattern='{siret}/',
|
||||
|
@ -436,7 +430,6 @@ class APIEntreprise(BaseResource):
|
|||
return self.get('v3/dgfip/etablissements/%s/chiffres_affaires' % siret, raw=True, **kwargs)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
description=_(
|
||||
'Match firm\'s society representative against local FranceConnect identity information'
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
# Generated by Django 3.2.18 on 2023-04-14 17:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Initial schema for the api_impot_particulier connector (Resource model)."""

    initial = True

    dependencies = [
        ('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
    ]

    operations = [
        migrations.CreateModel(
            name='Resource',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50, verbose_name='Title')),
                ('slug', models.SlugField(unique=True, verbose_name='Identifier')),
                ('description', models.TextField(verbose_name='Description')),
                ('api_url', models.URLField(
                    default='https://gw.dgfip.finances.gouv.fr/impotparticulier/1.0',
                    max_length=256,
                    verbose_name='DGFIP API base URL',
                )),
                ('oauth_username', models.CharField(max_length=128, verbose_name='DGFIP API Username')),
                ('oauth_password', models.CharField(max_length=128, verbose_name='DGFIP API Password')),
                ('oauth_scopes', models.CharField(max_length=128, verbose_name='DGFIP API Scopes', blank=True)),
                ('id_teleservice', models.TextField(max_length=128, verbose_name='DGFIP API ID_Teleservice', blank=True)),
                ('users', models.ManyToManyField(
                    blank=True,
                    related_name='_api_impot_particulier_resource_users_+',
                    related_query_name='+',
                    to='base.ApiUser',
                )),
            ],
            options={
                'verbose_name': 'API Impot Particulier',
            },
        ),
    ]
|
|
@ -0,0 +1,22 @@
|
|||
# Generated by Django 3.2.18 on 2023-05-25 09:49
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Make id_teleservice and oauth_scopes mandatory (drop blank=True)."""

    dependencies = [
        ('api_impot_particulier', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='resource',
            name='id_teleservice',
            field=models.TextField(max_length=128, verbose_name='DGFIP API ID_Teleservice'),
        ),
        migrations.AlterField(
            model_name='resource',
            name='oauth_scopes',
            field=models.CharField(max_length=128, verbose_name='DGFIP API Scopes'),
        ),
    ]
|
|
@ -0,0 +1,306 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
import uuid
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import requests
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
from passerelle.utils.timeout import Timeout
|
||||
|
||||
|
||||
class ServiceIsDown(APIError):
    """Raised when the DGFIP backend is unreachable or returns unusable data."""

    def __init__(self):
        super().__init__(_('API Impot Particulier service is unavailable'))

    def __str__(self):
        # When raised from within another exception handler, append the
        # original (chained) exception so logs keep the low-level cause.
        base_message = super().__str__()
        if self.__context__:
            return f'{base_message}: {self.__context__}'
        return base_message
|
||||
|
||||
|
||||
class Resource(BaseResource):
    """Connector to the DGFIP 'API Impot Particulier' services."""

    # Gateway base URL; endpoint paths are joined onto it in get_tax_data().
    api_url = models.URLField(
        _('DGFIP API base URL'),
        max_length=256,
        default='https://gw.dgfip.finances.gouv.fr/impotparticulier/1.0',
    )
    oauth_username = models.CharField(_('DGFIP API Username'), max_length=128)
    oauth_password = models.CharField(_('DGFIP API Password'), max_length=128)
    oauth_scopes = models.CharField(_('DGFIP API Scopes'), max_length=128)
    id_teleservice = models.TextField(_('DGFIP API ID_Teleservice'), max_length=128)

    # call() does its own success/failure logging, so the generic request
    # error logging is disabled here.
    log_requests_errors = False
    requests_timeout = 30
    requests_max_retries = {
        'total': 3,
        # retry after: 0.5, 1.5 and 3.5 seconds
        'backoff_factor': 0.5,
        'allowed_methods': ['GET', 'POST'],
        'status_forcelist': [413, 429, 503, 504],
    }

    category = _('Business Process Connectors')

    class Meta:
        verbose_name = _('API Impot Particulier')
|
||||
|
||||
@classmethod
def parse_numero_fiscal(cls, value):
    """Normalize a numero fiscal: drop surrounding/inner spaces and require
    a non-empty string of ASCII digits; raise APIError otherwise."""
    cleaned = value.strip().replace(' ', '')
    # isascii() guards isdigit(), which would otherwise accept unicode digits.
    if not cleaned or not cleaned.isascii() or not cleaned.isdigit():
        raise APIError(_('invalid numero_fiscal'))
    return cleaned
|
||||
|
||||
@classmethod
def parse_annee_de_revenu(cls, value):
    """Validate an income year: it must parse as an int and lie strictly
    between 1 and 9 years before the current year; raise APIError otherwise."""
    try:
        year = int(value)
    except (TypeError, ValueError):
        raise APIError(_('invalid annee_de_revenu'))
    age = datetime.date.today().year - year
    if not 0 < age < 10:
        raise APIError(_('invalid annee_de_revenu'))
    return year
|
||||
|
||||
@endpoint(
    name='spi-situations-ir-assiettes-annrev',
    description=_('Provides revenue tax situation for a specific year.'),
    parameters={
        'numero_fiscal': {'description': _('Tax number of the person')},
        'annee_de_revenu': {'description': _('Income year')},
    },
)
def spi_situations_ir_assiettes_annrev(self, request, numero_fiscal, annee_de_revenu):
    """HTTP endpoint: validate both parameters then fetch the IR situation."""
    situation = self.get_spi_situations_ir_assiettes_annrev(
        numero_fiscal=self.parse_numero_fiscal(numero_fiscal),
        annee_de_revenu=self.parse_annee_de_revenu(annee_de_revenu),
        timeout=Timeout(20),
    )
    return {'data': situation}
|
||||
|
||||
def get_spi_situations_ir_assiettes_annrev(self, numero_fiscal, annee_de_revenu, timeout=None):
    """Fetch the income-tax (IR) situation of *numero_fiscal* for *annee_de_revenu*.

    Delegates to call(); *name* is only used there for log messages.
    """
    return self.call(
        # fix: the logging name said '...-deuxans' (copy/paste leftover from the
        # two-year endpoint) while the URL below is the 'annrev' endpoint; align
        # it so logs match the request actually made.
        name='spi-situations-ir-assiettes-annrev',
        endpoint_template='spi/{spi}/situations/ir/assiettes/annrev/{annrev}',
        timeout=timeout,
        spi=numero_fiscal,
        annrev=annee_de_revenu,
        accept='application/prs.dgfip.part.situations.ir.assiettes.v1+json',
    )
|
||||
|
||||
@endpoint(
    name='spi-situations-th-assiettes-principale-annrev',
    description=_('Provides housing tax situation for a specific year.'),
    parameters={
        'numero_fiscal': {'description': _('Tax number of the person')},
        'annee_de_revenu': {'description': _('Income year')},
    },
)
def spi_situations_th_assiettes_principale_annrev(self, request, numero_fiscal, annee_de_revenu):
    """HTTP endpoint: validate both parameters then fetch the TH situation."""
    situation = self.get_spi_situations_th_assiettes_principale_annrev(
        numero_fiscal=self.parse_numero_fiscal(numero_fiscal),
        annee_de_revenu=self.parse_annee_de_revenu(annee_de_revenu),
        timeout=Timeout(20),
    )
    return {'data': situation}
|
||||
|
||||
def get_spi_situations_th_assiettes_principale_annrev(self, numero_fiscal, annee_de_revenu, timeout=None):
    """Fetch the housing-tax (TH) situation of *numero_fiscal* for *annee_de_revenu*.

    Delegates to call(); *name* is only used there for log messages.
    """
    return self.call(
        # fix: the logging name said '...-deuxans' (copy/paste leftover from the
        # two-year endpoint) while the URL below is the 'annrev' endpoint; align
        # it so logs match the request actually made.
        name='spi-situations-th-assiettes-principale-annrev',
        endpoint_template='spi/{spi}/situations/th/assiettes/principale/annrev/{annrev}',
        timeout=timeout,
        spi=numero_fiscal,
        annrev=annee_de_revenu,
        accept='application/prs.dgfip.part.situations.th.assiettes.v1+json',
    )
|
||||
|
||||
def call(self, name, endpoint_template, timeout=None, **kwargs):
    """Perform one authenticated API call and log its outcome.

    Every call gets a fresh correlation id; both failure and success are
    journaled (at warning level so they reach the resource journal) with
    the correlation id and the template parameters.
    """
    correlation_id = str(uuid.uuid4().hex)
    pretty_kwargs = ', '.join(f'{key}={value}' for key, value in kwargs.items())
    log_extra = {
        'correlation_id': correlation_id,
        'id_teleservice': self.id_teleservice,
        'kwargs': kwargs,
    }
    try:
        data = self.get_tax_data(
            session=self.requests,
            base_url=self.api_url,
            access_token=self._get_access_token(timeout=timeout),
            correlation_id=correlation_id,
            endpoint_template=endpoint_template,
            id_teleservice=self.id_teleservice,
            timeout=timeout,
            **kwargs,
        )
    except ServiceIsDown as exc:
        self.logger.warning('%s(%s) failed: %s', name, pretty_kwargs, exc, extra=log_extra)
        raise
    self.logger.warning('%s(%s) success', name, pretty_kwargs, extra=dict(log_extra, data=data))
    return data
|
||||
|
||||
@classmethod
def get_tax_data(
    cls,
    session,
    base_url,
    access_token,
    correlation_id,
    endpoint_template,
    accept,
    id_teleservice=None,
    headers=None,
    timeout=None,
    **kwargs,
):
    """GET one api-impot-particulier endpoint and return its decoded JSON body.

    *endpoint_template* is formatted with **kwargs and joined onto *base_url*.
    Raises APIError for errors reported by the service, ServiceIsDown when
    the service is unreachable or its answer is unusable.
    """
    headers = {
        **(headers or {}),
        'Authorization': f'Bearer {access_token}',
        'X-Correlation-ID': correlation_id,
        'Accept': accept,
    }
    if id_teleservice:
        headers['ID_Teleservice'] = id_teleservice

    if not base_url.endswith('/'):
        base_url += '/'
    url = urljoin(base_url, endpoint_template.format(**kwargs))

    if timeout is not None:
        timeout = float(timeout)

    # api-impot-particulier error reporting is byzantine: some errors come
    # with a 4xx code, some with a 20x code, some carry a JSON 'erreur'
    # body, others are only identified by a 'codeapp' response header.
    def raise_service_error(response):
        try:
            content = response.json()['erreur']
        except (ValueError, KeyError):
            # fix: previously this was written as raise-inside-try catching
            # KeyError from response.headers; also the two copies of this
            # logic used inconsistent messages ('...error' vs '...-error').
            if 'codeapp' in response.headers:
                raise APIError('api-impot-particulier error', data={'codeapp': response.headers['codeapp']})
            raise ServiceIsDown
        raise APIError('api-impot-particulier error', data=content)

    try:
        response = session.get(url, headers=headers, timeout=timeout)
        response.raise_for_status()
    except requests.HTTPError:
        raise_service_error(response)
    except requests.RequestException:
        raise ServiceIsDown

    # some errors are reported with a non-200 2xx/3xx status
    if response.status_code != 200:
        raise_service_error(response)

    try:
        return response.json()
    except ValueError:
        raise ServiceIsDown
|
||||
|
||||
def _get_access_token(self, timeout=None):
    """Return an OAuth access token, cached for 300 seconds per
    (username, password, api_url) triple."""
    fingerprint = f'{self.oauth_username}-{self.oauth_password}-{self.api_url}'.encode()
    cache_key = 'dgfip-at-' + hashlib.sha256(fingerprint).hexdigest()

    token = cache.get(cache_key)
    if token:
        return token

    token = self.get_access_token(
        session=self.requests,
        base_url=self.api_url,
        username=self.oauth_username,
        password=self.oauth_password,
        scope=self.oauth_scopes,
        timeout=timeout,
    )
    cache.set(cache_key, token, 300)
    return token
|
||||
|
||||
@classmethod
def get_access_token(cls, session, base_url, username, password, scope, timeout=None):
    """Fetch an OAuth2 token via the client-credentials grant.

    Raises ServiceIsDown when the token endpoint is unreachable or its
    answer does not contain an 'access_token'.
    """
    data = {
        'grant_type': 'client_credentials',
    }
    if scope:
        data['scope'] = scope

    url = urljoin(base_url, '/token')

    if timeout is not None:
        timeout = float(timeout)

    try:
        response = session.post(url, data=data, auth=(username, password), timeout=timeout)
        response.raise_for_status()
    except requests.RequestException:
        raise ServiceIsDown
    try:
        # fix: the response body was parsed twice (response.json() was called
        # again, pointlessly, after extracting the token); parse it once.
        access_token = response.json()['access_token']
    except (ValueError, KeyError, TypeError):
        raise ServiceIsDown
    return access_token
|
|
@ -17,8 +17,9 @@ KNOWN_ERRORS = {
|
|||
'Pas de droit sur la période demandée pour la prestation sélectionnée et le bénéficiaire choisi',
|
||||
'Pas de droit sur la période demandée pour la prestation sélectionnée.',
|
||||
"Votre quotient familial (Qf) sur cette période est non disponible. Pour plus d'information, contactez-nous.",
|
||||
# API particulier error message not from the source above
|
||||
# API particulier error messages not from the source above
|
||||
'Les paramètres fournis sont incorrects ou ne correspondent pas à un avis',
|
||||
"L'identifiant indiqué n'existe pas, n'est pas connu ou ne comporte aucune information pour cet appel.",
|
||||
},
|
||||
400: {
|
||||
'Absence de code confidentiel. Le document ne peut être édité.',
|
||||
|
@ -30,6 +31,8 @@ KNOWN_ERRORS = {
|
|||
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée',
|
||||
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée (après date du jour)',
|
||||
'L’opérateurs téléphonique» ne propose pas de raccordement SMS avec un prestataire externe (raccordement avec un numéro court). ',
|
||||
# API particulier error messages not from the source above
|
||||
"La référence de l'avis n'est pas correctement formatée",
|
||||
},
|
||||
500: {
|
||||
'Les informations souhaitées sont momentanément indisponibles. Merci de renouveler votre demande ultérieurement.',
|
||||
|
@ -39,7 +42,7 @@ KNOWN_ERRORS = {
|
|||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. Des paramètres manquent.",
|
||||
(
|
||||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. "
|
||||
"La taille du message ne doit pas être supérieure à 160 caractères."
|
||||
'La taille du message ne doit pas être supérieure à 160 caractères.'
|
||||
),
|
||||
(
|
||||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. "
|
||||
|
@ -50,7 +53,7 @@ KNOWN_ERRORS = {
|
|||
"Votre demande n'a pu aboutir en raison d'une erreur technique lié à l'appel au service IMC.",
|
||||
(
|
||||
"Votre demande n’a pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. "
|
||||
"Merci de renouveler votre demande ultérieurement."
|
||||
'Merci de renouveler votre demande ultérieurement.'
|
||||
),
|
||||
},
|
||||
}
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
# Generated by Django 3.2.18 on 2023-12-13 10:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Double the api_key column length (1024 -> 2048)."""

    dependencies = [
        ('api_particulier', '0006_api_key_length_1024'),
    ]

    operations = [
        migrations.AlterField(
            model_name='apiparticulier',
            name='api_key',
            field=models.CharField(blank=True, default='', max_length=2048, verbose_name='API key'),
        ),
    ]
|
|
@ -63,7 +63,7 @@ class APIParticulier(BaseResource):
|
|||
choices=[(key, platform['label']) for key, platform in PLATFORMS.items()],
|
||||
)
|
||||
|
||||
api_key = models.CharField(max_length=1024, default='', blank=True, verbose_name=_('API key'))
|
||||
api_key = models.CharField(max_length=2048, default='', blank=True, verbose_name=_('API key'))
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
|
@ -170,7 +170,6 @@ class APIParticulier(BaseResource):
|
|||
self.save()
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
description=_('Get scopes available'),
|
||||
display_order=1,
|
||||
)
|
||||
|
@ -184,7 +183,6 @@ class APIParticulier(BaseResource):
|
|||
}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
show=False,
|
||||
description=_('Get citizen\'s fiscal informations'),
|
||||
parameters={
|
||||
|
@ -208,7 +206,6 @@ class APIParticulier(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='avis-imposition',
|
||||
perm='can_access',
|
||||
description=_('Get citizen\'s fiscal informations'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
|
@ -303,7 +300,6 @@ class APIParticulier(BaseResource):
|
|||
return data
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
show=False,
|
||||
description=_('Get family allowances recipient informations'),
|
||||
parameters={
|
||||
|
@ -327,7 +323,6 @@ class APIParticulier(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='situation-familiale',
|
||||
perm='can_access',
|
||||
description=_('Get family allowances recipient informations'),
|
||||
parameters={
|
||||
'code_postal': {
|
||||
|
@ -363,6 +358,11 @@ class APIParticulier(BaseResource):
|
|||
)
|
||||
data['data']['numero_allocataire'] = numero_allocataire
|
||||
data['data']['code_postal'] = code_postal
|
||||
for kind in 'allocataires', 'enfants':
|
||||
for person in data['data'].get(kind) or []:
|
||||
if len(person.get('dateDeNaissance') or '') == 8:
|
||||
birthdate = person['dateDeNaissance']
|
||||
person['dateDeNaissance_iso'] = birthdate[4:] + '-' + birthdate[2:4] + '-' + birthdate[:2]
|
||||
return data
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import string
|
||||
from urllib import parse as urlparse
|
||||
|
||||
|
@ -32,6 +33,42 @@ from passerelle.utils.conversion import num2deg
|
|||
from passerelle.utils.jsonresponse import APIError
|
||||
from passerelle.utils.templates import render_to_string, validate_template
|
||||
|
||||
# JSON schema of a single feature in an applyEdits payload: optional point
# geometry (x/y as strings) plus a mandatory free-form attributes object.
EDIT_ITEM_SCHEMA = {
    '$schema': 'http://json-schema.org/draft-04/schema#',
    'title': 'Item schema',
    'description': '',
    'type': 'object',
    'properties': {
        'geometry': {
            'type': 'object',
            'properties': {'x': {'type': 'string'}, 'y': {'type': 'string'}},
        },
        'attributes': {'type': 'object'},
    },
    'required': ['attributes'],
}

# JSON schema of the whole applyEdits payload: at least one of adds/updates
# (lists of items above) or deletes (list of object ids as strings).
EDIT_SCHEMA = {
    '$schema': 'http://json-schema.org/draft-04/schema#',
    'title': 'Edit payload',
    'description': '',
    'type': 'object',
    'properties': {
        'adds': {
            'type': 'array',
            'description': 'Adds object',
            'items': EDIT_ITEM_SCHEMA,
        },
        'updates': {'type': 'array', 'description': 'Updates object', 'items': EDIT_ITEM_SCHEMA},
        'deletes': {'type': 'array', 'description': 'Deletes object', 'items': {'type': 'string'}},
    },
    'minProperties': 1,
    'unflatten': True,
}
|
||||
|
||||
|
||||
class ArcGISError(APIError):
    """Error reported by the ArcGIS backend, surfaced as an APIError."""
|
||||
|
@ -177,7 +214,6 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='mapservice-query',
|
||||
description=_('Map Service Query'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
|
@ -247,7 +283,6 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='featureservice-query',
|
||||
description=_('Feature Service Query'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
|
@ -318,9 +353,49 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
text_fieldname=text_fieldname,
|
||||
)
|
||||
|
||||
@endpoint(
    name='featureservice-applyedits',
    description=_('Feature Service Apply Edits'),
    parameters={
        'folder': {
            'description': _('Folder name'),
            'example_value': 'Specialty',
        },
        'service': {
            'description': _('Service name'),
            'example_value': 'ESRI_StateCityHighway_USA',
        },
        'layer': {
            'description': _('Layer or table name'),
            'example_value': '1',
        },
    },
    post={'request_body': {'schema': {'application/json': EDIT_SCHEMA}}},
)
def featureservice_applyedits(
    self,
    request,
    post_data,
    service,
    layer='0',
    folder='',
):
    """Apply add/update/delete edits to a feature service layer.

    https://developers.arcgis.com/rest/services-reference/enterprise/apply-edits-feature-service-layer-.htm

    NOTE(review): unlike the sibling endpoints this one declares no perm=;
    confirm whether open access is intended for a write operation.
    """
    path = 'services/'
    if folder:
        path += folder + '/'
    path += service + '/FeatureServer/' + layer + '/applyEdits'
    # ArcGIS expects each edit list serialized as a JSON string parameter.
    params = {'f': 'pjson'}
    for key in post_data:
        post_data[key] = json.dumps(post_data[key])
    params.update(post_data)
    return {'data': self.request(urlparse.urljoin(self.base_url, path), data=params)}
|
||||
|
||||
@endpoint(
|
||||
name='tile',
|
||||
description=_('Tiles layer'),
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<layer>[\w/]+)/(?P<zoom>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+)\.png$',
|
||||
)
|
||||
def tile(self, request, layer, zoom, tile_x, tile_y):
|
||||
|
@ -349,7 +424,6 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
name='q',
|
||||
description=_('Query'),
|
||||
pattern=r'^(?P<query_slug>[\w:_-]+)/$',
|
||||
perm='can_access',
|
||||
show=False,
|
||||
)
|
||||
def q(self, request, query_slug, q=None, full=False, **kwargs):
|
||||
|
|
|
@ -75,13 +75,21 @@ class ArpegeECP(BaseResource):
|
|||
@endpoint(
|
||||
name='api',
|
||||
pattern=r'^users/(?P<nameid>\w+)/forms$',
|
||||
perm='can_access',
|
||||
description='Returns user forms',
|
||||
example_pattern='users/{nameid}/forms',
|
||||
description=_('Returns user forms'),
|
||||
parameters={
|
||||
'nameid': {'description': _('Publik ID'), 'example_value': 'nameid'},
|
||||
'status': {'description': _('Demands status'), 'example_value': 'pending'},
|
||||
},
|
||||
)
|
||||
def get_user_forms(self, request, nameid):
|
||||
def get_user_forms(self, request, nameid, status='pending'):
|
||||
access_token = self.get_access_token(nameid)
|
||||
url = urlparse.urljoin(self.webservice_base_url, 'DemandesUsager')
|
||||
params = {'scope': 'data_administratives'}
|
||||
if status == 'pending':
|
||||
params['EtatDemande'] = 'DEPOSEE, ENCRSINSTR' # value for filtering pending forms
|
||||
elif status == 'done':
|
||||
params['EtatDemande'] = 'TRAITEEPOS, TRAITEENEG, TRAITEE' # value for filtering done forms
|
||||
auth = HawkAuth(self.hawk_auth_id, self.hawk_auth_key, ext=access_token)
|
||||
try:
|
||||
response = self.requests.get(url, params=params, auth=auth)
|
||||
|
@ -94,7 +102,7 @@ class ArpegeECP(BaseResource):
|
|||
except ValueError:
|
||||
raise APIError('No JSON content returned: %r' % response.content[:1000])
|
||||
if not result.get('Data'):
|
||||
raise APIError("%s (%s)" % (result.get('LibErreur'), result.get('CodErreur')))
|
||||
raise APIError('%s (%s)' % (result.get('LibErreur'), result.get('CodErreur')))
|
||||
for demand in result['Data']['results']:
|
||||
try:
|
||||
data_administratives = demand['data_administratives']
|
||||
|
|
|
@ -147,6 +147,8 @@ class ASTech(BaseResource, HTTPResource):
|
|||
|
||||
_category_ordering = [_('Parameters'), _('Rules'), _('Demand'), 'Tech & Debug']
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('AS-TECH')
|
||||
|
||||
|
@ -159,7 +161,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
try:
|
||||
content = response.json()
|
||||
except ValueError:
|
||||
content = response.content[:1024]
|
||||
content = '%r' % response.content[:1024]
|
||||
raise APIError(
|
||||
'AS-TECH response: %s %s' % (response.status_code, response.reason),
|
||||
data={
|
||||
|
@ -220,10 +222,51 @@ class ASTech(BaseResource, HTTPResource):
|
|||
json_response = self.call_json(method, url, params=params, **kwargs)
|
||||
return json_response
|
||||
|
||||
def get_view_schema(self, view_code):
    """Return the column schema of an AS-TECH view as {code: column_info},
    where column_info keeps 'type', 'length' and a matching 'operator'.
    The result is cached per-resource and per-view."""
    cache_key = 'astech-%s-%s-schema' % (self.id, view_code)
    cached = cache.get(cache_key)
    if cached:
        return cached

    schema = {}
    columns = self.call('apicli/data/%s/columns' % view_code).get('columns', [])
    for column in columns:
        # drop the human-readable description, index by the column code
        column.pop('des')
        code = column.pop('code')
        # numeric columns filter with '=', everything else with 'is_equal'
        column['operator'] = '=' if column['type'] == 'NUM' else 'is_equal'
        schema[code] = column

    cache.set(cache_key, schema)
    return schema
|
||||
|
||||
def build_view_filters(self, view_code, filters):
    """Turn a 'name=value;name2=value2' query string into AS-TECH filter
    expressions, using the view schema for types and operators.

    Malformed segments (no '=') are ignored; an unknown field name or a
    value longer than the column allows raises APIError.
    """
    if not filters:
        return []
    schema = self.get_view_schema(view_code)
    filters_expression = []
    for expression in filters.split(';'):
        try:
            name, value = expression.split('=')
        except ValueError:
            continue
        # fix: an unknown field name used to raise a bare KeyError (an
        # unhandled 500 for the caller); report it as a clean APIError.
        if name not in schema:
            raise APIError(_('Unknown filter field %s') % name)
        if value and schema[name]['length'] and len(value) > int(schema[name]['length']):
            raise APIError(
                _('Value of %s exceeds authorized length (%s)') % (name, schema[name]['length'])
            )
        filters_expression.append(
            {
                'field': name,
                'type': schema[name]['type'],
                'filter': {'value': value, 'operator': schema[name]['operator']},
            }
        )
    return filters_expression
|
||||
|
||||
@endpoint(
|
||||
name='connections',
|
||||
description=_('See all possible connections codes (see configuration)'),
|
||||
perm='can_access',
|
||||
display_category='Tech & Debug',
|
||||
display_order=1,
|
||||
)
|
||||
|
@ -233,7 +276,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='authorization',
|
||||
description=_('See authorization tokens (testing only)'),
|
||||
perm='can_access',
|
||||
display_category='Tech & Debug',
|
||||
display_order=2,
|
||||
)
|
||||
|
@ -242,8 +284,7 @@ class ASTech(BaseResource, HTTPResource):
|
|||
|
||||
@endpoint(
|
||||
name='services',
|
||||
description=_("List authorized services for connected user"),
|
||||
perm='can_access',
|
||||
description=_('List authorized services for connected user'),
|
||||
display_category=_('Rules'),
|
||||
display_order=1,
|
||||
)
|
||||
|
@ -256,7 +297,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='company',
|
||||
description=_('Company code of the applicant'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'applicant': {
|
||||
'description': _(
|
||||
|
@ -278,7 +318,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='companies',
|
||||
description=_('List of authorized companies for an applicant'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'applicant': {
|
||||
'description': _(
|
||||
|
@ -305,6 +344,8 @@ class ASTech(BaseResource, HTTPResource):
|
|||
'designation': True,
|
||||
}
|
||||
companies = self.call('apicli/rule-call-by-alias/societes_demandeur/invoke', json=payload)
|
||||
if not isinstance(companies, dict):
|
||||
raise APIError('Invalid response: %s' % companies)
|
||||
companies = [{'id': str(key), 'text': value} for key, value in companies.items()]
|
||||
companies.sort(key=lambda item: item['id']) # "same as output" sort
|
||||
return {'data': companies}
|
||||
|
@ -312,7 +353,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='labels',
|
||||
description=_('List of predefined labels for a company'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'company': {
|
||||
'description': _('Company code (societeDemandeur). If absent, use "company" endpoint result')
|
||||
|
@ -327,14 +367,15 @@ class ASTech(BaseResource, HTTPResource):
|
|||
labels = self.call(
|
||||
'apicli/rule-call-by-alias/libelles_predefinis/invoke', json={'societeDemandeur': company}
|
||||
)
|
||||
if not isinstance(labels, dict):
|
||||
raise APIError('Invalid response: %s' % labels)
|
||||
labels = [{'id': str(key), 'text': value} for key, value in labels.items()]
|
||||
labels.sort(key=lambda item: item['id']) # "same as output" sort
|
||||
return {'data': labels}
|
||||
|
||||
@endpoint(
|
||||
name='parameter',
|
||||
description=_("Value of a parameter"),
|
||||
perm='can_access',
|
||||
description=_('Value of a parameter'),
|
||||
parameters={
|
||||
'name': {'description': _('Name of the parameter'), 'example_value': 'LIBELDEMDEF'},
|
||||
'company': {'description': _('Company code. If absent, use "company" endpoint result')},
|
||||
|
@ -354,7 +395,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='create-demand',
|
||||
description=_('Create a demand'),
|
||||
perm='can_access',
|
||||
methods=['post'],
|
||||
post={'request_body': {'schema': {'application/json': DEMAND_SCHEMA}}},
|
||||
display_category=_('Demand'),
|
||||
|
@ -392,7 +432,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='add-document',
|
||||
description=_('Add a document in a demand'),
|
||||
perm='can_access',
|
||||
methods=['post'],
|
||||
post={'request_body': {'schema': {'application/json': ADD_DOCUMENT_SCHEMA}}},
|
||||
display_category=_('Demand'),
|
||||
|
@ -414,7 +453,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='demand-position',
|
||||
description=_('Get demand position'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'demand_id': {
|
||||
'description': _('Demand id'),
|
||||
|
@ -436,7 +474,6 @@ class ASTech(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='demand-all-positions',
|
||||
description=_('List all demand possible positions'),
|
||||
perm='can_access',
|
||||
display_category=_('Demand'),
|
||||
display_order=4,
|
||||
)
|
||||
|
@ -447,3 +484,69 @@ class ASTech(BaseResource, HTTPResource):
|
|||
position['id'] = position['position']
|
||||
position['text'] = position['positionLib']
|
||||
return {'data': positions}
|
||||
|
||||
@endpoint(
|
||||
name='list-views',
|
||||
display_order=1,
|
||||
description=_('List available views'),
|
||||
display_category=_('Referential'),
|
||||
)
|
||||
def list_views(self, request):
|
||||
results = self.call('apicli/data/views')
|
||||
astech_views = results.get('views', [])
|
||||
for view in astech_views:
|
||||
view['id'] = view['apivId']
|
||||
view['text'] = view['apivNom']
|
||||
return {'data': astech_views}
|
||||
|
||||
@endpoint(
|
||||
name='get-view-columns',
|
||||
display_order=2,
|
||||
description=_('Get view columns'),
|
||||
display_category=_('Referential'),
|
||||
parameters={
|
||||
'code': {
|
||||
'description': _('View code'),
|
||||
'example_value': 'ASTECH_BIENS',
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_view_columns(self, request, code):
|
||||
endpoint = 'apicli/data/%s/columns' % code
|
||||
results = self.call(endpoint)
|
||||
columns = results.get('columns', [])
|
||||
for column in columns:
|
||||
column['id'] = column['code']
|
||||
column['text'] = column['des']
|
||||
return {'data': columns}
|
||||
|
||||
@endpoint(
|
||||
name='get-view-data',
|
||||
display_order=3,
|
||||
description=_('Get view data'),
|
||||
display_category=_('Referential'),
|
||||
datasource=True,
|
||||
parameters={
|
||||
'code': {
|
||||
'description': _('View code'),
|
||||
'example_value': 'ASTECH_BIENS',
|
||||
},
|
||||
'id_column': {'description': _('Name of column contaning the id'), 'example_value': 'BIEN_ID'},
|
||||
'text_column': {
|
||||
'description': _('Name of column contaning the label'),
|
||||
'example_value': 'DESIGNATION',
|
||||
},
|
||||
'filters': {
|
||||
'description': _('Semicolon separated filter expressions'),
|
||||
'example_value': 'GENRE=SIT;SECTEUR=S1',
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_view_data(self, request, code, id_column, text_column, filters=None):
|
||||
endpoint = 'apicli/data/%s/results' % code
|
||||
filters = self.build_view_filters(code, filters)
|
||||
results = self.call(endpoint, json={'data': {'filters': filters}})
|
||||
for result in results:
|
||||
result['id'] = result[id_column]
|
||||
result['text'] = result[text_column]
|
||||
return {'data': results}
|
||||
|
|
|
@ -282,7 +282,6 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-entites-getref',
|
||||
parameters=GF_DOCUMENTS_ENTITIES_GETFREF_PARAMS,
|
||||
)
|
||||
|
@ -294,7 +293,6 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-entites-list',
|
||||
parameters=GF_DOCUMENTS_ENTITIES_LIST_PARAMS,
|
||||
)
|
||||
|
@ -323,7 +321,6 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-entites-read',
|
||||
parameters=GF_DOCUMENTS_ENTITIES_READ_PARAMS,
|
||||
)
|
||||
|
@ -335,7 +332,6 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-entites-search',
|
||||
parameters=GF_DOCUMENTS_ENTITIES_SEARCH_PARAMS,
|
||||
)
|
||||
|
@ -364,7 +360,6 @@ class AstreREST(BaseResource):
|
|||
@endpoint(
|
||||
name='gf-documents-gedmanager-document-create',
|
||||
description=_('Create document'),
|
||||
perm='can_access',
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
|
@ -387,7 +382,6 @@ class AstreREST(BaseResource):
|
|||
@endpoint(
|
||||
name='gf-documents-gedmanager-document-delete',
|
||||
description=_('Delete document'),
|
||||
perm='can_access',
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
|
@ -416,7 +410,6 @@ class AstreREST(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
name='gf-documents-gedmanager-document-read',
|
||||
parameters=GF_DOCUMENTS_DOCUMENT_READ_PARAMS,
|
||||
)
|
||||
|
@ -433,7 +426,6 @@ class AstreREST(BaseResource):
|
|||
@endpoint(
|
||||
name='gf-documents-gedmanager-document-update',
|
||||
description=_('Update document'),
|
||||
perm='can_access',
|
||||
post={
|
||||
'request_body': {
|
||||
'schema': {
|
||||
|
@ -453,7 +445,7 @@ class AstreREST(BaseResource):
|
|||
)
|
||||
}
|
||||
|
||||
@endpoint(methods=['get'], perm='can_access', name='gf-documents-referentiel-domainepj')
|
||||
@endpoint(methods=['get'], name='gf-documents-referentiel-domainepj')
|
||||
def gf_documents_referentiel_domainepj(self, request):
|
||||
return {
|
||||
'data': self._get_data_source(
|
||||
|
@ -461,7 +453,7 @@ class AstreREST(BaseResource):
|
|||
)
|
||||
}
|
||||
|
||||
@endpoint(methods=['get'], perm='can_access', name='gf-documents-referentiel-typedocument')
|
||||
@endpoint(methods=['get'], name='gf-documents-referentiel-typedocument')
|
||||
def gf_documents_referentiel_typedocument(self, request):
|
||||
return {
|
||||
'data': self._get_data_source(
|
||||
|
|
|
@ -28,164 +28,164 @@ from passerelle.utils.jsonresponse import APIError
|
|||
from passerelle.utils.validation import is_number
|
||||
|
||||
ASSOCIATION_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS assocation",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"Financier",
|
||||
"CodeFamille",
|
||||
"CatTiers",
|
||||
"NomEnregistrement",
|
||||
"StatutTiers",
|
||||
"Type",
|
||||
"AdresseTitre",
|
||||
"AdresseIsAdresseDeCommande",
|
||||
"AdresseIsAdresseDeFacturation",
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS assocation',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'Financier',
|
||||
'CodeFamille',
|
||||
'CatTiers',
|
||||
'NomEnregistrement',
|
||||
'StatutTiers',
|
||||
'Type',
|
||||
'AdresseTitre',
|
||||
'AdresseIsAdresseDeCommande',
|
||||
'AdresseIsAdresseDeFacturation',
|
||||
],
|
||||
"properties": {
|
||||
"Financier": {"description": "financial association", "type": "string", "enum": ["true", "false"]},
|
||||
"CodeFamille": {
|
||||
"description": "association family code",
|
||||
"type": "string",
|
||||
'properties': {
|
||||
'Financier': {'description': 'financial association', 'type': 'string', 'enum': ['true', 'false']},
|
||||
'CodeFamille': {
|
||||
'description': 'association family code',
|
||||
'type': 'string',
|
||||
},
|
||||
"CatTiers": {
|
||||
"description": "association category",
|
||||
"type": "string",
|
||||
'CatTiers': {
|
||||
'description': 'association category',
|
||||
'type': 'string',
|
||||
},
|
||||
"NomEnregistrement": {
|
||||
"description": "association name",
|
||||
"type": "string",
|
||||
'NomEnregistrement': {
|
||||
'description': 'association name',
|
||||
'type': 'string',
|
||||
},
|
||||
"StatutTiers": {
|
||||
"description": "association status",
|
||||
"type": "string",
|
||||
"enum": ["PROPOSE", "VALIDE", "REFUSE", "BLOQUE", "A COMPLETER"],
|
||||
'StatutTiers': {
|
||||
'description': 'association status',
|
||||
'type': 'string',
|
||||
'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'BLOQUE', 'A COMPLETER'],
|
||||
},
|
||||
"Type": {"description": "association type", "type": "string", "enum": ["D", "F", "*"]},
|
||||
"NumeroSiret": {
|
||||
"description": "SIREN number",
|
||||
"type": "string",
|
||||
'Type': {'description': 'association type', 'type': 'string', 'enum': ['D', 'F', '*']},
|
||||
'NumeroSiret': {
|
||||
'description': 'SIREN number',
|
||||
'type': 'string',
|
||||
},
|
||||
"NumeroSiretFin": {
|
||||
"description": "NIC number",
|
||||
"type": "string",
|
||||
'NumeroSiretFin': {
|
||||
'description': 'NIC number',
|
||||
'type': 'string',
|
||||
},
|
||||
"AdresseTitre": {
|
||||
"type": "string",
|
||||
'AdresseTitre': {
|
||||
'type': 'string',
|
||||
},
|
||||
"AdresseIsAdresseDeCommande": {"type": "string", "enum": ["true", "false"]},
|
||||
"AdresseIsAdresseDeFacturation": {"type": "string", "enum": ["true", "false"]},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
'AdresseIsAdresseDeCommande': {'type': 'string', 'enum': ['true', 'false']},
|
||||
'AdresseIsAdresseDeFacturation': {'type': 'string', 'enum': ['true', 'false']},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
},
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
},
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
CONTACT_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS contact",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"CodeContact",
|
||||
"CodeTitreCivilite",
|
||||
"Nom",
|
||||
"AdresseDestinataire",
|
||||
"CodePostal",
|
||||
"Ville",
|
||||
"EncodeKeyStatut",
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS contact',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'CodeContact',
|
||||
'CodeTitreCivilite',
|
||||
'Nom',
|
||||
'AdresseDestinataire',
|
||||
'CodePostal',
|
||||
'Ville',
|
||||
'EncodeKeyStatut',
|
||||
],
|
||||
"properties": {
|
||||
"CodeContact": {
|
||||
"type": "string",
|
||||
'properties': {
|
||||
'CodeContact': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodeTitreCivilite": {
|
||||
"type": "string",
|
||||
'CodeTitreCivilite': {
|
||||
'type': 'string',
|
||||
},
|
||||
"Nom": {
|
||||
"type": "string",
|
||||
'Nom': {
|
||||
'type': 'string',
|
||||
},
|
||||
"AdresseDestinataire": {
|
||||
"type": "string",
|
||||
'AdresseDestinataire': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodePostal": {
|
||||
"type": "string",
|
||||
'CodePostal': {
|
||||
'type': 'string',
|
||||
},
|
||||
"Ville": {
|
||||
"type": "string",
|
||||
'Ville': {
|
||||
'type': 'string',
|
||||
},
|
||||
"EncodeKeyStatut": {
|
||||
"type": "string",
|
||||
'EncodeKeyStatut': {
|
||||
'type': 'string',
|
||||
},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
},
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
},
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
DOCUMENT_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS assocation",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"Sujet",
|
||||
"Entite",
|
||||
"CodType",
|
||||
"Type",
|
||||
"hdnCodeTrt",
|
||||
"EncodeKeyEntite",
|
||||
"CodeDomaine",
|
||||
"CodDom",
|
||||
"document",
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS assocation',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'Sujet',
|
||||
'Entite',
|
||||
'CodType',
|
||||
'Type',
|
||||
'hdnCodeTrt',
|
||||
'EncodeKeyEntite',
|
||||
'CodeDomaine',
|
||||
'CodDom',
|
||||
'document',
|
||||
],
|
||||
"properties": {
|
||||
"Sujet": {
|
||||
"type": "string",
|
||||
'properties': {
|
||||
'Sujet': {
|
||||
'type': 'string',
|
||||
},
|
||||
"Entite": {
|
||||
"type": "string",
|
||||
'Entite': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodType": {
|
||||
"type": "string",
|
||||
'CodType': {
|
||||
'type': 'string',
|
||||
},
|
||||
"Type": {
|
||||
"type": "string",
|
||||
'Type': {
|
||||
'type': 'string',
|
||||
},
|
||||
"hdnCodeTrt": {
|
||||
"type": "string",
|
||||
'hdnCodeTrt': {
|
||||
'type': 'string',
|
||||
},
|
||||
"EncodeKeyEntite": {
|
||||
"type": "string",
|
||||
'EncodeKeyEntite': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodeDomaine": {
|
||||
"type": "string",
|
||||
'CodeDomaine': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodDom": {
|
||||
"type": "string",
|
||||
'CodDom': {
|
||||
'type': 'string',
|
||||
},
|
||||
"document": {
|
||||
"type": "object",
|
||||
"required": ['filename', 'content_type', 'content'],
|
||||
'document': {
|
||||
'type': 'object',
|
||||
'required': ['filename', 'content_type', 'content'],
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
|
@ -198,236 +198,236 @@ DOCUMENT_SCHEMA = {
|
|||
},
|
||||
},
|
||||
},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
},
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
},
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
GRANT_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS grant",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"Libelle",
|
||||
"LibelleCourt",
|
||||
"ModGestion",
|
||||
"TypeAide",
|
||||
"Sens",
|
||||
"CodeTiersDem",
|
||||
"CodeServiceGestionnaire",
|
||||
"CodeServiceUtilisateur",
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS grant',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'Libelle',
|
||||
'LibelleCourt',
|
||||
'ModGestion',
|
||||
'TypeAide',
|
||||
'Sens',
|
||||
'CodeTiersDem',
|
||||
'CodeServiceGestionnaire',
|
||||
'CodeServiceUtilisateur',
|
||||
],
|
||||
"properties": {
|
||||
"Libelle": {
|
||||
"type": "string",
|
||||
'properties': {
|
||||
'Libelle': {
|
||||
'type': 'string',
|
||||
},
|
||||
"LibelleCourt": {
|
||||
"type": "string",
|
||||
'LibelleCourt': {
|
||||
'type': 'string',
|
||||
},
|
||||
"ModGestion": {"type": "string", "enum": ["1", "2", "3", "4"]},
|
||||
"TypeAide": {
|
||||
"type": "string",
|
||||
'ModGestion': {'type': 'string', 'enum': ['1', '2', '3', '4']},
|
||||
'TypeAide': {
|
||||
'type': 'string',
|
||||
},
|
||||
"Sens": {
|
||||
"type": "string",
|
||||
'Sens': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodeTiersDem": {
|
||||
"type": "string",
|
||||
'CodeTiersDem': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodeServiceGestionnaire": {
|
||||
"type": "string",
|
||||
'CodeServiceGestionnaire': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodeServiceUtilisateur": {
|
||||
"type": "string",
|
||||
'CodeServiceUtilisateur': {
|
||||
'type': 'string',
|
||||
},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
},
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
},
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
INDANA_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS INDANA indicator",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": ["CodeDossier", "CodeInd_1", "AnneeInd_1", "ValInd_1"],
|
||||
"properties": {
|
||||
"CodeDossier": {
|
||||
"type": "string",
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS INDANA indicator',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': ['CodeDossier', 'CodeInd_1', 'AnneeInd_1', 'ValInd_1'],
|
||||
'properties': {
|
||||
'CodeDossier': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodeInd_1": {
|
||||
"type": "string",
|
||||
'CodeInd_1': {
|
||||
'type': 'string',
|
||||
},
|
||||
"AnneeInd_1": {
|
||||
"type": "string",
|
||||
'AnneeInd_1': {
|
||||
'type': 'string',
|
||||
},
|
||||
"ValInd_1": {
|
||||
"type": "string",
|
||||
'ValInd_1': {
|
||||
'type': 'string',
|
||||
},
|
||||
"IndAide": {
|
||||
"type": "string",
|
||||
'IndAide': {
|
||||
'type': 'string',
|
||||
},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
},
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
},
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
INDANA_KEY_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS INDANA indicator key",
|
||||
"description": "",
|
||||
"type": "object",
|
||||
"required": ["CodeDossier", "CodeInd_1", "AnneeInd_1"],
|
||||
"properties": {
|
||||
"CodeDossier": {
|
||||
"type": "string",
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS INDANA indicator key',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'required': ['CodeDossier', 'CodeInd_1', 'AnneeInd_1'],
|
||||
'properties': {
|
||||
'CodeDossier': {
|
||||
'type': 'string',
|
||||
},
|
||||
"CodeInd_1": {
|
||||
"type": "string",
|
||||
'CodeInd_1': {
|
||||
'type': 'string',
|
||||
},
|
||||
"AnneeInd_1": {
|
||||
"type": "string",
|
||||
'AnneeInd_1': {
|
||||
'type': 'string',
|
||||
},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
},
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
},
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
TIERS_RIB_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS TiersRib",
|
||||
"description": "TiersRib",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"CodeTiers",
|
||||
"CodePaiement",
|
||||
"LibelleCourt",
|
||||
"NumeroIban",
|
||||
"CleIban",
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS TiersRib',
|
||||
'description': 'TiersRib',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'CodeTiers',
|
||||
'CodePaiement',
|
||||
'LibelleCourt',
|
||||
'NumeroIban',
|
||||
'CleIban',
|
||||
'CodeBic',
|
||||
"CodeDomiciliation",
|
||||
"CodeStatut",
|
||||
"CodeDevise",
|
||||
"CodeIso2Pays",
|
||||
"LibelleCompteEtranger",
|
||||
'CodeDomiciliation',
|
||||
'CodeStatut',
|
||||
'CodeDevise',
|
||||
'CodeIso2Pays',
|
||||
'LibelleCompteEtranger',
|
||||
],
|
||||
"properties": {
|
||||
"CodeDevise": {"type": "string"},
|
||||
"CodeDomiciliation": {"type": "string"},
|
||||
"CodeIso2Pays": {"type": "string"},
|
||||
"CodePaiement": {"type": "string"},
|
||||
"CodeStatut": {
|
||||
"type": "string",
|
||||
"enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"],
|
||||
'properties': {
|
||||
'CodeDevise': {'type': 'string'},
|
||||
'CodeDomiciliation': {'type': 'string'},
|
||||
'CodeIso2Pays': {'type': 'string'},
|
||||
'CodePaiement': {'type': 'string'},
|
||||
'CodeStatut': {
|
||||
'type': 'string',
|
||||
'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'A COMPLETER', 'BLOQUE', 'EN MODIFICATION'],
|
||||
},
|
||||
"CodeTiers": {"type": "string"},
|
||||
"IndicateurRibDefaut": {"type": "string"},
|
||||
"LibelleCompteEtranger": {"type": "string"},
|
||||
"LibelleCourt": {"type": "string"},
|
||||
"NumeroIban": {"type": "string"},
|
||||
"CleIban": {"type": "string"},
|
||||
"CodeBic": {"type": "string"},
|
||||
"IdRib": {"type": "string"},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
'CodeTiers': {'type': 'string'},
|
||||
'IndicateurRibDefaut': {'type': 'string'},
|
||||
'LibelleCompteEtranger': {'type': 'string'},
|
||||
'LibelleCourt': {'type': 'string'},
|
||||
'NumeroIban': {'type': 'string'},
|
||||
'CleIban': {'type': 'string'},
|
||||
'CodeBic': {'type': 'string'},
|
||||
'IdRib': {'type': 'string'},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
},
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
},
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
TIERS_RIB_UPDATE_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "AstreGS TiersRib",
|
||||
"description": "TiersRib Update",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"CodePaiement",
|
||||
"LibelleCourt",
|
||||
"NumeroIban",
|
||||
"CleIban",
|
||||
"CodeBic",
|
||||
"CodeDomiciliation",
|
||||
"CodeStatut",
|
||||
"CodeDevise",
|
||||
"CodeIso2Pays",
|
||||
"LibelleCompteEtranger",
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'AstreGS TiersRib',
|
||||
'description': 'TiersRib Update',
|
||||
'type': 'object',
|
||||
'required': [
|
||||
'CodePaiement',
|
||||
'LibelleCourt',
|
||||
'NumeroIban',
|
||||
'CleIban',
|
||||
'CodeBic',
|
||||
'CodeDomiciliation',
|
||||
'CodeStatut',
|
||||
'CodeDevise',
|
||||
'CodeIso2Pays',
|
||||
'LibelleCompteEtranger',
|
||||
],
|
||||
"properties": {
|
||||
"CodeDevise": {"type": "string"},
|
||||
"CodeDomiciliation": {"type": "string"},
|
||||
"CodeIso2Pays": {"type": "string"},
|
||||
"CodePaiement": {"type": "string"},
|
||||
"CodeStatut": {
|
||||
"type": "string",
|
||||
"enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"],
|
||||
'properties': {
|
||||
'CodeDevise': {'type': 'string'},
|
||||
'CodeDomiciliation': {'type': 'string'},
|
||||
'CodeIso2Pays': {'type': 'string'},
|
||||
'CodePaiement': {'type': 'string'},
|
||||
'CodeStatut': {
|
||||
'type': 'string',
|
||||
'enum': ['PROPOSE', 'VALIDE', 'REFUSE', 'A COMPLETER', 'BLOQUE', 'EN MODIFICATION'],
|
||||
},
|
||||
"IndicateurRibDefaut": {"type": "string"},
|
||||
"LibelleCompteEtranger": {"type": "string"},
|
||||
"LibelleCourt": {"type": "string"},
|
||||
"NumeroIban": {"type": "string"},
|
||||
"CleIban": {"type": "string"},
|
||||
"CodeBic": {"type": "string"},
|
||||
"organism": {
|
||||
"description": _('Organisme'),
|
||||
"type": "string",
|
||||
'IndicateurRibDefaut': {'type': 'string'},
|
||||
'LibelleCompteEtranger': {'type': 'string'},
|
||||
'LibelleCourt': {'type': 'string'},
|
||||
'NumeroIban': {'type': 'string'},
|
||||
'CleIban': {'type': 'string'},
|
||||
'CodeBic': {'type': 'string'},
|
||||
'organism': {
|
||||
'description': _('Organisme'),
|
||||
'type': 'string',
|
||||
},
|
||||
"budget": {
|
||||
"description": _('Budget'),
|
||||
"type": "string",
|
||||
'budget': {
|
||||
'description': _('Budget'),
|
||||
'type': 'string',
|
||||
},
|
||||
"exercice": {
|
||||
"description": _('Exercice'),
|
||||
"type": "string",
|
||||
'exercice': {
|
||||
'description': _('Exercice'),
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -492,7 +492,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Find associations by SIREN number'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'siren': {'description': _('SIREN Number'), 'example_value': '77567227216096'},
|
||||
'organism': {'description': _('Organisme'), 'example_value': 'NOMDEVILLE'},
|
||||
|
@ -517,7 +516,6 @@ class AstreGS(BaseResource):
|
|||
@endpoint(
|
||||
description=_('Check if association exists by its SIRET number'),
|
||||
name='check-association-by-siret',
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'siret': {'description': _('SIRET Number'), 'example_value': '7756722721609600014'},
|
||||
'organism': {'description': _('Organisme'), 'example_value': 'NOMDEVILLE'},
|
||||
|
@ -535,7 +533,6 @@ class AstreGS(BaseResource):
|
|||
@endpoint(
|
||||
name='get-association-link-means',
|
||||
description=_('Get association linking means'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'association_id': {'description': _('Association ID'), 'example_value': '42435'},
|
||||
'NameID': {'description': _('Publik ID'), 'example_value': 'xyz24d934'},
|
||||
|
@ -585,7 +582,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Create link between user and association'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -619,7 +615,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Remove link between user and association'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {'description': _('Publik NameID'), 'example_value': 'xyz24d934'},
|
||||
'association_id': {'description': _('Association ID'), 'example_value': '12345'},
|
||||
|
@ -635,7 +630,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('List user links'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -665,7 +659,6 @@ class AstreGS(BaseResource):
|
|||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
name='create-association',
|
||||
post={
|
||||
'description': _('Creates an association'),
|
||||
|
@ -680,7 +673,6 @@ class AstreGS(BaseResource):
|
|||
@endpoint(
|
||||
description=_('Get association informations'),
|
||||
name='get-association-by-id',
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'association_id': {'description': _('Association ID'), 'example_value': '42435'},
|
||||
'NameID': {'description': _('Publik ID'), 'example_value': 'xyz24d934'},
|
||||
|
@ -701,7 +693,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='get-contact',
|
||||
perm='can_access',
|
||||
description=_('Get contact details'),
|
||||
parameters={
|
||||
'contact_id': {
|
||||
|
@ -720,7 +711,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-contact',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create contact'),
|
||||
'request_body': {'schema': {'application/json': CONTACT_SCHEMA}},
|
||||
|
@ -737,7 +727,6 @@ class AstreGS(BaseResource):
|
|||
@endpoint(
|
||||
description=_('Delete contact'),
|
||||
name='delete-contact',
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'contact_id': {'description': _('Contact ID'), 'example_value': '4242'},
|
||||
'organism': {'description': _('Organisme'), 'example_value': 'NOMDEVILLE'},
|
||||
|
@ -752,7 +741,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-document',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create document'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_SCHEMA}},
|
||||
|
@ -769,7 +757,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-grant-demand',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create grant demand'),
|
||||
'request_body': {'schema': {'application/json': GRANT_SCHEMA}},
|
||||
|
@ -782,7 +769,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-indana-indicator',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create indana indicator'),
|
||||
'request_body': {'schema': {'application/json': INDANA_SCHEMA}},
|
||||
|
@ -795,7 +781,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='update-indana-indicator',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Update indana indicator'),
|
||||
'request_body': {'schema': {'application/json': INDANA_SCHEMA}},
|
||||
|
@ -808,7 +793,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='delete-indana-indicator',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Delete indana indicator'),
|
||||
'request_body': {'schema': {'application/json': INDANA_KEY_SCHEMA}},
|
||||
|
@ -821,7 +805,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='create-tiers-rib',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Create RIB'),
|
||||
'request_body': {'schema': {'application/json': TIERS_RIB_SCHEMA}},
|
||||
|
@ -834,7 +817,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='get-tiers-rib',
|
||||
perm='can_access',
|
||||
description=_('Get RIB'),
|
||||
parameters={
|
||||
'CodeTiers': {'example_value': '42435'},
|
||||
|
@ -852,7 +834,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='update-tiers-rib',
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Update RIB'),
|
||||
'request_body': {'schema': {'application/json': TIERS_RIB_UPDATE_SCHEMA}},
|
||||
|
@ -871,7 +852,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='delete-tiers-rib',
|
||||
perm='can_access',
|
||||
description=_('Delete RIB'),
|
||||
parameters={
|
||||
'CodeTiers': {'example_value': '42435'},
|
||||
|
@ -889,7 +869,6 @@ class AstreGS(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='find-tiers-by-rib',
|
||||
perm='can_access',
|
||||
description=_('Find person by RIB'),
|
||||
parameters={
|
||||
'banque': {'example_value': '30001'},
|
||||
|
@ -912,13 +891,12 @@ class AstreGS(BaseResource):
|
|||
for item in r.liste.EnregRechercheTiersReturn:
|
||||
tiers_data = serialize_object(item)
|
||||
tiers_data['id'] = tiers_data['N']
|
||||
tiers_data['text'] = '%{Nom_Enregistrement}s (%{N}s)'.format(**tiers_data)
|
||||
tiers_data['text'] = '{Nom_Enregistrement} ({N})'.format(**tiers_data)
|
||||
data.append(tiers_data)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
name='get-dossier',
|
||||
perm='can_access',
|
||||
description=_('Get Dossier'),
|
||||
parameters={
|
||||
'CodeDossier': {'example_value': '2021-0004933'},
|
||||
|
|
|
@ -23,6 +23,7 @@ from django.db import models
|
|||
from django.utils import dateformat, dateparse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from zeep import helpers
|
||||
from zeep.exceptions import Fault
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
|
@ -80,24 +81,23 @@ class ATALConnector(BaseResource):
|
|||
"""
|
||||
self._soap_client(wsdl='DemandeService')
|
||||
|
||||
@endpoint(methods=['get'], perm='can_access', name='get-thematique')
|
||||
@endpoint(methods=['get'], name='get-thematique')
|
||||
def get_thematique(self, request):
|
||||
return self._xml_ref('DemandeService', 'getThematiqueATAL', 'thematiques')
|
||||
|
||||
@endpoint(methods=['get'], perm='can_access', name='get-type-activite')
|
||||
@endpoint(methods=['get'], name='get-type-activite')
|
||||
def get_type_activite(self, request):
|
||||
return self._basic_ref('VilleAgileService', 'getTypeActivite')
|
||||
|
||||
@endpoint(methods=['get'], perm='can_access', name='get-type-de-voie')
|
||||
@endpoint(methods=['get'], name='get-type-de-voie')
|
||||
def get_type_de_voie(self, request):
|
||||
return self._basic_ref('VilleAgileService', 'getTypeDeVoie')
|
||||
|
||||
@endpoint(methods=['get'], perm='can_access', name='get-types-equipement')
|
||||
@endpoint(methods=['get'], name='get-types-equipement')
|
||||
def get_types_equipement(self, request):
|
||||
return self._xml_ref('VilleAgileService', 'getTypesEquipement', 'types')
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
name='insert-action-comment',
|
||||
post={
|
||||
'description': _('Insert action comment'),
|
||||
|
@ -114,7 +114,6 @@ class ATALConnector(BaseResource):
|
|||
return process_response(demande_number)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
name='insert-demande-complet-by-type',
|
||||
post={
|
||||
'description': _('Insert demande complet by type'),
|
||||
|
@ -171,7 +170,6 @@ class ATALConnector(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
example_pattern='{demande_number}/',
|
||||
pattern=r'^(?P<demande_number>\w+)/$',
|
||||
name='retrieve-details-demande',
|
||||
|
@ -185,7 +183,6 @@ class ATALConnector(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
example_pattern='{demande_number}/',
|
||||
pattern=r'^(?P<demande_number>\w+)/$',
|
||||
name='retrieve-etat-travaux',
|
||||
|
@ -197,7 +194,6 @@ class ATALConnector(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
example_pattern='{demande_number}/',
|
||||
pattern=r'^(?P<demande_number>\w+)/$',
|
||||
parameters={
|
||||
|
@ -265,7 +261,6 @@ class ATALConnector(BaseResource):
|
|||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
post={
|
||||
'description': _('Upload a file'),
|
||||
'request_body': {'schema': {'application/json': schemas.UPLOAD}},
|
||||
|
@ -286,12 +281,15 @@ class ATALConnector(BaseResource):
|
|||
'numeroDemande': post_data['numero_demande'],
|
||||
'nomFichier': filename,
|
||||
}
|
||||
self._soap_call(wsdl='ChargementPiecesJointesService', method='upload', **data)
|
||||
try:
|
||||
self._soap_call(wsdl='ChargementPiecesJointesService', method='upload', **data)
|
||||
except Fault as e:
|
||||
raise APIError(str(e))
|
||||
|
||||
return {}
|
||||
|
||||
@endpoint(
|
||||
methods=['get'],
|
||||
perm='can_access',
|
||||
example_pattern='{demande_number}/',
|
||||
pattern=r'^(?P<demande_number>\w+)/$',
|
||||
name='new-comments',
|
||||
|
|
|
@ -0,0 +1,59 @@
|
|||
# Generated by Django 3.2.18 on 2023-06-26 15:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Initial migration for the ATAL REST connector: creates the AtalREST
    # model with the common BaseResource/HTTPResource columns (title, slug,
    # description, auth and TLS settings) plus the connector-specific
    # base_url and api_key fields.
    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='AtalREST',
            fields=[
                (
                    'id',
                    models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
                ),
                ('title', models.CharField(max_length=50, verbose_name='Title')),
                ('slug', models.SlugField(unique=True, verbose_name='Identifier')),
                ('description', models.TextField(verbose_name='Description')),
                # HTTPResource mixin fields: basic auth, client TLS material and proxy.
                (
                    'basic_auth_username',
                    models.CharField(
                        blank=True, max_length=128, verbose_name='Basic authentication username'
                    ),
                ),
                (
                    'basic_auth_password',
                    models.CharField(
                        blank=True, max_length=128, verbose_name='Basic authentication password'
                    ),
                ),
                (
                    'client_certificate',
                    models.FileField(
                        blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
                    ),
                ),
                (
                    'trusted_certificate_authorities',
                    models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'),
                ),
                (
                    'verify_cert',
                    models.BooleanField(blank=True, default=True, verbose_name='TLS verify certificates'),
                ),
                (
                    'http_proxy',
                    models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
                ),
                # Connector-specific configuration.
                ('base_url', models.URLField(verbose_name='API URL')),
                ('api_key', models.CharField(max_length=1024, verbose_name='API key')),
            ],
            options={
                'verbose_name': 'Atal REST',
            },
        ),
    ]
|
|
@ -0,0 +1,539 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import binascii
|
||||
import collections
|
||||
import io
|
||||
import json
|
||||
import urllib
|
||||
|
||||
import requests
|
||||
from django.db import models
|
||||
from django.utils import dateparse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource, HTTPResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
# JSON-schema fragment describing a single uploaded file as posted by
# w.c.s. forms: base64 'content' is mandatory, 'filename' and
# 'content_type' are optional.  Reused by the attachment schemas below.
FILE_OBJECT = {
    'type': 'object',
    'description': 'File object',
    'required': ['content'],
    'properties': {
        'filename': {
            'type': 'string',
            'description': 'Filename',
        },
        'content': {
            'type': 'string',
            'description': 'Content',
        },
        'content_type': {
            'type': 'string',
            'description': 'Content type',
        },
    },
}
|
||||
|
||||
# Request-body schema for the single-attachment endpoint: one 'file' key,
# which is either a FILE_OBJECT or an empty placeholder (empty string or
# null) meaning "no file to upload".
SINGLE_ATTACHMENT_SCHEMA = {
    '$schema': 'http://json-schema.org/draft-04/schema#',
    'type': 'object',
    'additionalProperties': False,
    'properties': {
        'file': {
            'oneOf': [
                FILE_OBJECT,
                {'type': 'string', 'description': 'empty file, do not consider', 'pattern': r'^$'},
                {'type': 'null', 'description': 'empty file, do not consider'},
            ]
        }
    },
    'required': ['file'],
}
|
||||
|
||||
|
||||
# Request-body schema for the multi-attachment endpoint: a list of files
# (empty placeholders allowed and skipped) plus the list of works-request
# identifiers they should be attached to.  'unflatten' lets callers post
# flattened keys such as 'files/0' or 'worksrequests_ids/1'.
ATTACHMENTS_SCHEMA = {
    '$schema': 'http://json-schema.org/draft-04/schema#',
    'type': 'object',
    'additionalProperties': False,
    'properties': {
        'files': {
            'type': 'array',
            'items': {
                'oneOf': [
                    FILE_OBJECT,
                    {'type': 'string', 'description': 'empty file, do not consider', 'pattern': r'^$'},
                    {'type': 'null', 'description': 'empty file, do not consider'},
                ]
            },
        },
        'worksrequests_ids': {'type': 'array', 'items': {'type': 'string'}},
    },
    'required': ['files', 'worksrequests_ids'],
    'unflatten': True,
}
|
||||
|
||||
|
||||
# Request-body schema for works-request creation.  All values arrive as
# strings (w.c.s. form payloads); the endpoint converts them to the
# integer/float/date types expected by the ATAL API.  'unflatten' lets
# callers post flattened keys such as 'contact/email' or 'thematic_ids/0'.
WORKSREQUESTS_SCHEMA = {
    '$schema': 'http://json-schema.org/draft-04/schema#',
    'type': 'object',
    # OrderedDict keeps property order stable in generated documentation.
    'properties': collections.OrderedDict(
        {
            'activity_nature_id': {'type': 'string'},
            'comments': {'type': 'string'},
            'contact': {
                'type': 'object',
                'properties': {
                    'adress1': {'type': 'string'},
                    'city': {'type': 'string'},
                    'email': {'type': 'string'},
                    'first_name': {'type': 'string'},
                    'last_name': {'type': 'string'},
                    'mobile': {'type': 'string'},
                    'phone': {'type': 'string'},
                    'zipcode': {'type': 'string'},
                },
            },
            'description': {'type': 'string'},
            'desired_date': {'type': 'string', 'description': 'format YYYY-MM-DD'},
            'keywords': {'type': 'string'},
            # Coordinates may come in as numbers or strings (possibly with a
            # decimal comma); the endpoint normalizes them to floats.
            'latitude': {
                'oneOf': [
                    {'type': 'number'},
                    {'type': 'string'},
                ]
            },
            'localization': {'type': 'string'},
            'longitude': {
                'oneOf': [
                    {'type': 'number'},
                    {'type': 'string'},
                ]
            },
            'object': {'type': 'string'},
            'operator': {'type': 'string'},
            'patrimony_id': {'type': 'string'},
            'priority_id': {'type': 'string'},
            'recipient_id': {'type': 'string'},
            'request_date': {
                'type': 'string',
                'description': 'format YYYY-MM-DD',
            },
            'requester_id': {'type': 'string'},
            'requesting_department_id': {'type': 'string'},
            'request_type': {'type': 'string'},
            'suggested_recipient_id': {'type': 'string'},
            'thematic_ids': {'type': 'array', 'items': {'type': 'string'}},
        }
    ),
    'required': ['object', 'recipient_id', 'requester_id', 'requesting_department_id'],
    'unflatten': True,
}
|
||||
|
||||
# ATAL works-request 'RequestState' codes mapped to their French labels,
# used to decorate API responses with a human-readable status.
# NOTE(review): gaps (10-12) presumably match unused codes on the ATAL
# side — confirm against the ATAL API documentation.
STATUS_MAP = {
    0: 'En attente',
    1: 'En analyse',
    2: 'Acceptée',
    3: 'Refusée',
    4: 'Annulée',
    5: 'Ajournée',
    6: 'Brouillon',
    7: 'Redirigée',
    8: 'Prise en compte',
    9: 'Clôturée',
    13: 'Archivée',
    14: 'À spécifier',
    15: 'À valider',
}
|
||||
|
||||
|
||||
# ATAL intervention 'WorkState' codes mapped to their French labels.
INTERVENTION_STATUS_MAP = {
    1: 'Pas commencé',
    2: 'En cours',
    4: 'Terminé',
    5: 'Fermé',
}
|
||||
|
||||
|
||||
def to_ds(record):
    """Decorate an ATAL record in place for use as a Publik datasource.

    Mirrors the ATAL 'Id' and 'Name' fields into the 'id' and 'text'
    keys expected by datasource consumers, and returns the same
    (mutated) dict.
    """
    record['id'], record['text'] = record['Id'], record['Name']
    return record
|
||||
|
||||
|
||||
class AtalREST(BaseResource, HTTPResource):
    """Connector for the ATAL REST API (works-request management).

    Authentication uses an ``X-API-Key`` header; all calls go through
    :meth:`_call`, which normalizes transport and HTTP errors into
    :class:`APIError`.
    """

    base_url = models.URLField(_('API URL'))
    api_key = models.CharField(max_length=1024, verbose_name=_('API key'))

    category = _('Business Process Connectors')

    class Meta:
        verbose_name = _('Atal REST')

    def _call(
        self, path, method='get', params=None, data=None, json_data=None, files=None, return_response=False
    ):
        """Issue an authenticated request against the ATAL API.

        :param path: path joined onto ``base_url``
        :param method: HTTP method ('get' or 'post')
        :param params: optional query-string parameters
        :param data: optional form data (POST only)
        :param json_data: optional JSON body (POST only; defaults to {})
        :param files: optional multipart files (POST only)
        :param return_response: when True, return the raw response object
            instead of the decoded JSON body
        :raises APIError: on transport errors, non-2xx statuses (with the
            server error payload attached when decodable) or invalid JSON
        """
        url = urllib.parse.urljoin(self.base_url, path)
        kwargs = {'headers': {'X-API-Key': self.api_key}}
        if params:
            kwargs['params'] = params

        if method == 'post':
            # ATAL expects a JSON body on every POST, even an empty one.
            kwargs['json'] = json_data or {}
            if files:
                kwargs['files'] = files
            if data:
                kwargs['data'] = data

        try:
            resp = self.requests.request(url=url, method=method, **kwargs)
        except (requests.Timeout, requests.RequestException) as e:
            raise APIError(str(e))
        try:
            resp.raise_for_status()
        except requests.RequestException as main_exc:
            # Attach the server error payload when it is decodable, so the
            # caller gets more than just the HTTP status line.
            try:
                err_data = resp.json()
            except (json.JSONDecodeError, requests.exceptions.RequestException):
                err_data = {'response_text': resp.text}
            raise APIError(str(main_exc), data=err_data)

        if return_response:
            return resp

        try:
            return resp.json()
        except (json.JSONDecodeError, requests.exceptions.RequestException) as e:
            raise APIError(str(e))

    def check_status(self):
        """Connector health check: hit the API test endpoint, raising on failure."""
        return self._call('api/Test', return_response=True)

    @endpoint(
        methods=['get'],
        name='thirdparties-requesting-departments',
        description=_('Get the third parties requesting departments referential'),
        parameters={
            'request_type': {
                'example_value': '1001',
            }
        },
    )
    def thirdparties_requesting_departments(self, request, request_type):
        """Return the requesting-departments referential as a datasource."""
        return {
            'data': [
                to_ds(record)
                for record in self._call(
                    'api/ThirdParties/RequestingDepartments', params={'RequestType': request_type}
                )
            ]
        }

    @endpoint(
        methods=['get'],
        description=_('Get the users referential'),
    )
    def users(self, request):
        """Return the users referential as a datasource."""
        return {'data': [to_ds(record) for record in self._call('api/Users')]}

    @endpoint(
        description=_('Create a works request'),
        post={
            'request_body': {
                'schema': {
                    'application/json': WORKSREQUESTS_SCHEMA,
                }
            },
            'input_example': {
                'activity_nature_id': '0',
                'comments': 'some comment',
                'contact/adress1': '1 rue des cinq diamants',
                'contact/city': 'paris',
                'contact/email': 'foo@bar.invalid',
                'contact/first_name': 'john',
                'contact/last_name': 'doe',
                'contact/mobile': '0606060606',
                'contact/phone': '0101010101',
                'contact/zipcode': '75013',
                'description': 'some description',
                'desired_date': '2023-06-28',
                'keywords': 'foo bar',
                'latitude': '0',
                'localization': 'somewhere',
                'longitude': '0',
                'object': 'some object',
                'operator': 'some operator',
                'patrimony_id': '0',
                'priority_id': '0',
                'recipient_id': '0',
                'request_date': '2023-06-27',
                'requester_id': '0',
                'requesting_department_id': '0',
                'request_type': '0',
                # fixed: a schema fragment had been pasted here instead of an
                # example value
                'suggested_recipient_id': '0',
                'thematic_ids/0': '1',
                'thematic_ids/1': '2',
            },
        },
    )
    def worksrequests(self, request, post_data):
        """Create a works request.

        Converts the all-string w.c.s. payload into the typed structure
        expected by the ATAL API (ints, floats, ISO dates, nested contact),
        posts it, and returns the created request decorated with a
        human-readable status label.

        :raises APIError: when a value cannot be converted to its expected type
        """
        data = {}
        # Identifier-like parameters: ATAL wants integers.
        int_params = {
            'activity_nature_id': 'ActivityNatureId',
            'patrimony_id': 'PatrimonyId',
            'priority_id': 'PriorityId',
            'recipient_id': 'RecipientId',
            'requester_id': 'RequesterId',
            'requesting_department_id': 'RequestingDepartmentId',
            'request_type': 'RequestType',
            'suggested_recipient_id': 'SuggestedRecipientId',
        }
        for param, atal_param in int_params.items():
            if param in post_data:
                try:
                    data[atal_param] = int(post_data[param])
                except ValueError:
                    raise APIError('%s must be an integer' % param)

        # Coordinates: accept numbers or strings, including a decimal comma.
        float_params = {
            'latitude': 'Latitude',
            'longitude': 'Longitude',
        }
        for param, atal_param in float_params.items():
            param_value = post_data.get(param, '')
            if param_value:
                if isinstance(param_value, str):
                    param_value = param_value.replace(',', '.')
                try:
                    data[atal_param] = float(param_value)
                except ValueError:
                    raise APIError('%s must be a float' % param)

        if 'thematic_ids' in post_data:
            data['ThematicIds'] = []
            for thematic_id in post_data['thematic_ids']:
                try:
                    data['ThematicIds'].append(int(thematic_id))
                except ValueError:
                    # fixed typo: was 'intenger'
                    raise APIError('a thematic identifier must be an integer')

        # Dates: validate YYYY-MM-DD and re-emit in ISO format.
        datetime_params = {
            'desired_date': 'DesiredDate',
            'request_date': 'RequestDate',
        }
        for param, atal_param in datetime_params.items():
            if param in post_data:
                try:
                    obj = dateparse.parse_date(post_data[param])
                except ValueError:
                    obj = None
                if obj is None:
                    raise APIError(
                        '%s must be a valid YYYY-MM-DD date (received: "%s")' % (param, post_data[param])
                    )
                data[atal_param] = obj.isoformat()

        # Nested contact block: copy only the known keys, renamed for ATAL.
        contact_params = {
            'adress1': 'Adress1',
            'city': 'City',
            'email': 'Email',
            'first_name': 'FirstName',
            'last_name': 'LastName',
            'mobile': 'Mobile',
            'phone': 'Phone',
            'zipcode': 'ZipCode',
        }
        if 'contact' in post_data:
            data['Contact'] = {}
            for param, atal_param in contact_params.items():
                if param in post_data['contact']:
                    data['Contact'][atal_param] = post_data['contact'][param]

        # Free-text parameters: passed through untouched.
        string_params = {
            'comments': 'Comments',
            'description': 'Description',
            'keywords': 'Keywords',
            'localization': 'Localization',
            'object': 'Object',
            'operator': 'Operator',
        }
        for param, atal_param in string_params.items():
            if param in post_data:
                data[atal_param] = post_data[param]

        resp_data = self._call('api/WorksRequests', method='post', json_data=data)
        resp_data['RequestStateLabel'] = STATUS_MAP.get(resp_data.get('RequestState', ''), '')
        return {'data': resp_data}

    @endpoint(
        description=_('Add an attachment to a works requests'),
        name='worksrequests-single-attachment',
        post={
            'request_body': {
                'schema': {
                    'application/json': SINGLE_ATTACHMENT_SCHEMA,
                }
            },
            'input_example': {
                'file': {
                    'filename': 'example-1.pdf',
                    'content_type': 'application/pdf',
                    'content': 'JVBERi0xL...(base64 PDF)...',
                },
            },
        },
        parameters={
            'worksrequests_id': {
                'example_value': '1',
            }
        },
    )
    def worksrequests_single_attachment(self, request, worksrequests_id, post_data):
        """Attach one base64-encoded file to a works request.

        An empty 'file' value is a no-op (empty-form convenience).

        :raises APIError: when the base64 content is invalid
        """
        if not post_data['file']:
            return {}
        try:
            content = base64.b64decode(post_data['file']['content'])
        except (TypeError, binascii.Error):
            raise APIError('Invalid file content')

        files = {
            'File': (
                post_data['file'].get('filename', ''),
                content,  # was io.BytesIO(content).read(), a pointless round-trip
                post_data['file'].get('content_type', ''),
            )
        }
        # return nothing if successful
        self._call(
            'api/WorksRequests/%s/Attachments' % worksrequests_id,
            method='post',
            files=files,
            return_response=True,
        )
        return {}

    @endpoint(
        description=_('Add attachments to multiple works requests'),
        name='worksrequests-attachments',
        post={
            'request_body': {
                'schema': {
                    'application/json': ATTACHMENTS_SCHEMA,
                }
            },
            'input_example': {
                'files/0': {
                    'filename': 'example-1.pdf',
                    'content_type': 'application/pdf',
                    'content': 'JVBERi0xL...(base64 PDF)...',
                },
                'files/1': {
                    'filename': 'example-2.pdf',
                    'content_type': 'application/pdf',
                    'content': 'JVBERi0xL...(base64 PDF)...',
                },
                'worksrequests_ids/0': '1',
                'worksrequests_ids/1': '2',
            },
        },
    )
    def worksrequests_attachments(self, request, post_data):
        """Attach base64-encoded files to several works requests at once.

        Empty file entries are skipped; when no real file remains the call
        is a no-op.

        :raises APIError: when any base64 content is invalid
        """
        files = []
        for file_ in post_data.get('files', []):
            if not file_:
                continue
            try:
                content = base64.b64decode(file_['content'])
            except (TypeError, binascii.Error):
                raise APIError('Invalid file content')
            files.append(
                (
                    'Files',
                    (
                        file_.get('filename', ''),
                        content,  # was io.BytesIO(content).read(), a pointless round-trip
                        file_.get('content_type', ''),
                    ),
                )
            )
        if not files:
            return {}
        data = {'Ids': post_data['worksrequests_ids']}
        # return nothing if successful
        self._call(
            'api/WorksRequests/Attachments',
            method='post',
            files=files,
            data=data,
            return_response=True,
        )
        return {}

    @endpoint(
        methods=['get'],
        name='worksrequest-status',
        description=_('Get the status of a works request'),
        parameters={
            'worksrequests_id': {
                'example_value': '1',
            },
            'filter_responses': {
                'example_value': '501,507',
            },
        },
    )
    def worksrequest_status(self, request, worksrequests_id, filter_responses=None):
        """Return a works request with its responses and status label.

        :param filter_responses: optional comma-separated list of
            ActionTypeId values; when given, only matching responses are kept
        :raises APIError: when filter_responses contains a non-integer
        """
        filter_responses = (
            [type_id.strip() for type_id in filter_responses.split(',') if type_id.strip()]
            if filter_responses
            else []
        )
        action_type_ids = []
        for type_id in filter_responses:
            try:
                action_type_ids.append(int(type_id))
            except ValueError:
                raise APIError('filter_responses must be a list of integers')

        resp_data = self._call('api/WorksRequests/%s' % worksrequests_id, params={'$expand': 'Responses'})
        resp_data['RequestStateLabel'] = STATUS_MAP.get(resp_data.get('RequestState', ''), '')
        if action_type_ids:
            responses = resp_data.pop('Responses', [])
            # the comprehension already yields a list; no 'or []' fallback needed
            resp_data['Responses'] = [
                resp for resp in responses if resp.get('ActionTypeId') in action_type_ids
            ]
        return {'data': resp_data}

    @endpoint(
        methods=['get'],
        name='worksrequest-intervention-status',
        description=_('Get the status of a works request intervention'),
        parameters={
            'number': {
                'example_value': 'DIT23070011',
            }
        },
    )
    def worksrequest_intervention_status(self, request, number):
        """Return the first intervention state of a works request, labelled.

        NOTE(review): unlike the other endpoints this path starts with '/',
        so urljoin drops any path component of base_url — confirm this is
        intentional on the ATAL side.
        """
        resp_data = self._call('/api/WorksRequests/GetInterventionStates', params={'number': number})
        resp_data = resp_data[0] if resp_data else {}
        resp_data['WorkStateLabel'] = INTERVENTION_STATUS_MAP.get(resp_data.get('WorkState', ''), '')
        return {'data': resp_data}
|
|
@ -199,7 +199,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
name='link',
|
||||
methods=['post'],
|
||||
description=_('Create link with an extranet account'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -233,7 +232,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
name='unlink',
|
||||
methods=['post'],
|
||||
description=_('Delete link with an extranet account'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -292,7 +290,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='dossiers',
|
||||
description=_('Get datas for all links'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -372,7 +369,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='search',
|
||||
description=_('Search for beneficiaries'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'first_name': {
|
||||
'description': _('Beneficiary first name'),
|
||||
|
@ -506,7 +502,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
name='link-by-id-per',
|
||||
methods=['post'],
|
||||
description=_('Create link with an extranet account'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'NameID': {
|
||||
'description': _('Publik NameID'),
|
||||
|
@ -526,7 +521,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='dossier-by-pair',
|
||||
description=_('Get dossier data with two integers'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'p1': {
|
||||
'description': _('First integer'),
|
||||
|
|
|
@ -91,6 +91,7 @@ class Migration(migrations.Migration):
|
|||
blank=True,
|
||||
max_length=600,
|
||||
verbose_name='Postal codes or department number to get streets, separated with commas',
|
||||
help_text='This parameter is only useful for the /streets/ endpoint (very rarely used)',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
|
|
|
@ -6,19 +6,19 @@ from django.db import migrations, models
|
|||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("base_adresse", "0018_text_to_jsonb"),
|
||||
('base_adresse', '0018_text_to_jsonb'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="streetmodel",
|
||||
name="resource",
|
||||
model_name='streetmodel',
|
||||
name='resource',
|
||||
field=models.ForeignKey(
|
||||
default=None,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="base_adresse.BaseAdresse",
|
||||
verbose_name="BAN Connector",
|
||||
to='base_adresse.BaseAdresse',
|
||||
verbose_name='BAN Connector',
|
||||
),
|
||||
),
|
||||
]
|
||||
|
|
|
@ -4,8 +4,8 @@ from django.db import migrations
|
|||
|
||||
|
||||
def set_streetmodel_resource(apps, schema_editor):
|
||||
BaseAdresse = apps.get_model("base_adresse", "BaseAdresse")
|
||||
StreetModel = apps.get_model("base_adresse", "StreetModel")
|
||||
BaseAdresse = apps.get_model('base_adresse', 'BaseAdresse')
|
||||
StreetModel = apps.get_model('base_adresse', 'StreetModel')
|
||||
if BaseAdresse.objects.exists():
|
||||
StreetModel.objects.update(resource=BaseAdresse.objects.first())
|
||||
else:
|
||||
|
@ -14,7 +14,7 @@ def set_streetmodel_resource(apps, schema_editor):
|
|||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("base_adresse", "0019_streetmodel_resource_add"),
|
||||
('base_adresse', '0019_streetmodel_resource_add'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
|
|
|
@ -6,17 +6,17 @@ from django.db import migrations, models
|
|||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("base_adresse", "0020_streetmodel_resource_runpython"),
|
||||
('base_adresse', '0020_streetmodel_resource_runpython'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="streetmodel",
|
||||
name="resource",
|
||||
model_name='streetmodel',
|
||||
name='resource',
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="base_adresse.BaseAdresse",
|
||||
verbose_name="BAN Connector",
|
||||
to='base_adresse.BaseAdresse',
|
||||
verbose_name='BAN Connector',
|
||||
),
|
||||
),
|
||||
]
|
||||
|
|
|
@ -4,11 +4,11 @@ from django.db import migrations
|
|||
|
||||
|
||||
def set_resource(apps, schema_editor):
|
||||
BaseAdresse = apps.get_model("base_adresse", "BaseAdresse")
|
||||
RegionModel = apps.get_model("base_adresse", "RegionModel")
|
||||
DepartmentModel = apps.get_model("base_adresse", "DepartmentModel")
|
||||
CityModel = apps.get_model("base_adresse", "CityModel")
|
||||
AddressCacheModel = apps.get_model("base_adresse", "AddressCacheModel")
|
||||
BaseAdresse = apps.get_model('base_adresse', 'BaseAdresse')
|
||||
RegionModel = apps.get_model('base_adresse', 'RegionModel')
|
||||
DepartmentModel = apps.get_model('base_adresse', 'DepartmentModel')
|
||||
CityModel = apps.get_model('base_adresse', 'CityModel')
|
||||
AddressCacheModel = apps.get_model('base_adresse', 'AddressCacheModel')
|
||||
if BaseAdresse.objects.exists():
|
||||
resource = BaseAdresse.objects.first()
|
||||
RegionModel.objects.update(resource=resource)
|
||||
|
|
|
@ -0,0 +1,67 @@
|
|||
# Generated by Django 3.2.18 on 2023-11-29 18:06
|
||||
|
||||
import django.contrib.postgres.indexes
|
||||
import django.db.models.functions.text
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Adds lookup indexes for the base_adresse referential tables:
    # plain b-tree indexes on code/zipcode columns, case-insensitive
    # (Upper) indexes on unaccented names, and a trigram GIN index on
    # street names to speed up substring search.  The pg_trgm extension
    # is created first since the GIN index depends on its operator class.
    dependencies = [
        ('base_adresse', '0030_auto_20220627_1511'),
    ]

    operations = [
        migrations.RunSQL(
            [
                'CREATE EXTENSION IF NOT EXISTS pg_trgm WITH SCHEMA public',
            ],
            reverse_sql=['DROP EXTENSION IF EXISTS pg_trgm'],
        ),
        migrations.AddIndex(
            model_name='citymodel',
            index=models.Index(fields=['code'], name='base_adress_code_e169d0_idx'),
        ),
        migrations.AddIndex(
            model_name='citymodel',
            index=models.Index(fields=['zipcode'], name='base_adress_zipcode_79aa6f_idx'),
        ),
        migrations.AddIndex(
            model_name='citymodel',
            index=models.Index(
                django.db.models.functions.text.Upper('unaccent_name'), name='base_adresse_city_name_idx'
            ),
        ),
        migrations.AddIndex(
            model_name='departmentmodel',
            index=models.Index(
                django.db.models.functions.text.Upper('unaccent_name'), name='base_adresse_dept_name_idx'
            ),
        ),
        migrations.AddIndex(
            model_name='regionmodel',
            index=models.Index(
                django.db.models.functions.text.Upper('unaccent_name'), name='base_adresse_region_name_idx'
            ),
        ),
        migrations.AddIndex(
            model_name='streetmodel',
            index=models.Index(fields=['ban_id'], name='base_adress_ban_id_2c35ab_idx'),
        ),
        migrations.AddIndex(
            model_name='streetmodel',
            index=models.Index(fields=['zipcode'], name='base_adress_zipcode_bf7091_idx'),
        ),
        migrations.AddIndex(
            model_name='streetmodel',
            index=models.Index(fields=['citycode'], name='base_adress_citycod_428b79_idx'),
        ),
        migrations.AddIndex(
            model_name='streetmodel',
            # Trigram index for fast ILIKE/substring matching on street names.
            index=django.contrib.postgres.indexes.GinIndex(
                django.contrib.postgres.indexes.OpClass(
                    django.db.models.functions.text.Upper('unaccent_name'), 'public.gin_trgm_ops'
                ),
                name='base_adresse_street_name_idx',
            ),
        ),
    ]
|
|
@ -5,9 +5,11 @@ import json
|
|||
from io import StringIO
|
||||
from urllib import parse as urlparse
|
||||
|
||||
from django.contrib.postgres import indexes as postgresql_indexes
|
||||
from django.core.exceptions import FieldError
|
||||
from django.db import connection, models
|
||||
from django.db.models import JSONField, Q
|
||||
from django.db.models.functions import Upper
|
||||
from django.utils import timezone
|
||||
from django.utils.http import urlencode
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
@ -44,12 +46,6 @@ class BaseAdresse(AddressResource):
|
|||
'<a href="https://api.gouv.fr/api/api-geo.html">API Geo</a>.'
|
||||
)
|
||||
|
||||
zipcode = models.CharField(
|
||||
max_length=600,
|
||||
blank=True,
|
||||
verbose_name=_('Postal codes or department number to get streets, separated with commas'),
|
||||
)
|
||||
|
||||
latitude = models.FloatField(
|
||||
null=True,
|
||||
blank=True,
|
||||
|
@ -63,6 +59,13 @@ class BaseAdresse(AddressResource):
|
|||
help_text=_('Geographic priority for /addresses/ endpoint.'),
|
||||
)
|
||||
|
||||
zipcode = models.CharField(
|
||||
max_length=600,
|
||||
blank=True,
|
||||
verbose_name=_('Postal codes or department number to get streets, separated with commas'),
|
||||
help_text=_('This parameter is only useful for the /streets/ endpoint (very rarely used)'),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Base Adresse Web Service')
|
||||
|
||||
|
@ -109,6 +112,7 @@ class BaseAdresse(AddressResource):
|
|||
@endpoint(
|
||||
pattern='(?P<q>.+)?$',
|
||||
description=_('Addresses list'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Address identifier')},
|
||||
'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'},
|
||||
|
@ -125,10 +129,24 @@ class BaseAdresse(AddressResource):
|
|||
'Prioritize results according to coordinates. "lat" parameter must also be present.'
|
||||
)
|
||||
},
|
||||
'type': {
|
||||
'description': _(
|
||||
'Type of address to return, housenumber, street, locality, municipality or all. Default is all.'
|
||||
)
|
||||
},
|
||||
},
|
||||
)
|
||||
def addresses(
|
||||
self, request, id=None, q=None, zipcode='', citycode=None, lat=None, lon=None, page_limit=5
|
||||
self,
|
||||
request,
|
||||
id=None,
|
||||
q=None,
|
||||
zipcode='',
|
||||
citycode=None,
|
||||
lat=None,
|
||||
lon=None,
|
||||
page_limit=5,
|
||||
type=None,
|
||||
):
|
||||
if id is not None:
|
||||
return self.get_by_id(request, id=id, citycode=citycode)
|
||||
|
@ -156,6 +174,8 @@ class BaseAdresse(AddressResource):
|
|||
if self.latitude and self.longitude or lat and lon:
|
||||
query_args['lat'] = lat or self.latitude
|
||||
query_args['lon'] = lon or self.longitude
|
||||
if type in ('housenumber', 'street', 'locality', 'municipality'):
|
||||
query_args['type'] = type
|
||||
query = urlencode(query_args)
|
||||
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
|
||||
|
||||
|
@ -167,7 +187,8 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
result = []
|
||||
|
||||
for feature in result_response.json().get('features'):
|
||||
features = result_response.json().get('features')
|
||||
for feature in features:
|
||||
if not feature['geometry']['type'] == 'Point':
|
||||
continue # skip unknown
|
||||
data = self.format_address_data(feature)
|
||||
|
@ -177,7 +198,6 @@ class BaseAdresse(AddressResource):
|
|||
)
|
||||
if not created:
|
||||
address.update_timestamp()
|
||||
|
||||
return {'data': result}
|
||||
|
||||
def get_by_id(self, request, id, citycode=None):
|
||||
|
@ -222,6 +242,7 @@ class BaseAdresse(AddressResource):
|
|||
@endpoint(
|
||||
pattern='(?P<q>.+)?$',
|
||||
description=_('Geocoding (Nominatim API)'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'},
|
||||
'zipcode': {'description': _('Zipcode')},
|
||||
|
@ -236,30 +257,51 @@ class BaseAdresse(AddressResource):
|
|||
'Prioritize results according to coordinates. "lon" parameter must be present.'
|
||||
)
|
||||
},
|
||||
'type': {
|
||||
'description': _(
|
||||
'Type of address to return, housenumber, street, locality, municipality or all. Default is all.'
|
||||
)
|
||||
},
|
||||
},
|
||||
)
|
||||
def search(self, request, q, zipcode='', citycode=None, lat=None, lon=None, **kwargs):
|
||||
def search(self, request, q, zipcode='', citycode=None, lat=None, lon=None, type=None, **kwargs):
|
||||
if kwargs.get('format', 'json') != 'json':
|
||||
raise NotImplementedError()
|
||||
result = self.addresses(
|
||||
request, q=q, zipcode=zipcode, citycode=citycode, lat=lat, lon=lon, page_limit=1
|
||||
request,
|
||||
q=q,
|
||||
zipcode=zipcode,
|
||||
citycode=citycode,
|
||||
lat=lat,
|
||||
lon=lon,
|
||||
page_limit=1,
|
||||
type=type,
|
||||
)
|
||||
return result['data']
|
||||
|
||||
@endpoint(
|
||||
description=_('Reverse geocoding'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'lat': {'description': _('Latitude'), 'example_value': 48.833708},
|
||||
'lon': {'description': _('Longitude'), 'example_value': 2.323349},
|
||||
'type': {
|
||||
'description': _(
|
||||
'Type of address to return, housenumber, street, locality, municipality or all. Default is all.'
|
||||
)
|
||||
},
|
||||
},
|
||||
)
|
||||
def reverse(self, request, lat, lon, **kwargs):
|
||||
def reverse(self, request, lat, lon, type=None, **kwargs):
|
||||
if kwargs.get('format', 'json') != 'json':
|
||||
raise NotImplementedError()
|
||||
|
||||
scheme, netloc, path, params, query, fragment = urlparse.urlparse(self.service_url)
|
||||
path = urlparse.urljoin(path, 'reverse/')
|
||||
query = urlencode({'lat': lat, 'lon': lon})
|
||||
query_dict = {'lat': lat, 'lon': lon}
|
||||
if type in ('housenumber', 'street', 'locality', 'municipality'):
|
||||
query_dict['type'] = type
|
||||
query = urlencode(query_dict)
|
||||
url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
|
||||
|
||||
try:
|
||||
|
@ -283,9 +325,10 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Streets from zipcode'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Street identifier')},
|
||||
'q': {'description': _("Street name")},
|
||||
'q': {'description': _('Street name')},
|
||||
'zipcode': {'description': _('Zipcode')},
|
||||
'citycode': {'description': _('INSEE City code')},
|
||||
'page_limit': {'description': _('Maximum number of results to return'), 'example_value': 30},
|
||||
|
@ -336,12 +379,13 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Cities list'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {
|
||||
'description': _('Get exactly one city using its code and postal code separated with a dot'),
|
||||
'example_value': '75056.75014',
|
||||
},
|
||||
'q': {'description': _("Search text in name or postal code"), 'example_value': 'Paris'},
|
||||
'q': {'description': _('Search text in name or postal code'), 'example_value': 'Paris'},
|
||||
'code': {
|
||||
'description': _('INSEE code (or multiple codes separated with commas)'),
|
||||
'example_value': '75056',
|
||||
|
@ -397,6 +441,7 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Departments list'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Get exactly one department using its code'), 'example_value': '59'},
|
||||
'q': {'description': _('Search text in name or code'), 'example_value': 'Nord'},
|
||||
|
@ -421,6 +466,7 @@ class BaseAdresse(AddressResource):
|
|||
|
||||
@endpoint(
|
||||
description=_('Regions list'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Get exactly one region using its code'), 'example_value': '32'},
|
||||
'q': {'description': _('Search text in name or code'), 'example_value': 'Hauts-de-France'},
|
||||
|
@ -691,6 +737,15 @@ class StreetModel(UnaccentNameMixin, models.Model):
|
|||
|
||||
class Meta:
|
||||
ordering = ['unaccent_name', 'name']
|
||||
indexes = [
|
||||
models.Index(fields=['ban_id']),
|
||||
models.Index(fields=['zipcode']),
|
||||
models.Index(fields=['citycode']),
|
||||
postgresql_indexes.GinIndex(
|
||||
postgresql_indexes.OpClass(Upper('unaccent_name'), 'public.gin_trgm_ops'),
|
||||
name='%(app_label)s_street_name_idx',
|
||||
),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
@ -715,6 +770,9 @@ class RegionModel(UnaccentNameMixin, models.Model):
|
|||
class Meta:
|
||||
ordering = ['code']
|
||||
unique_together = ('resource', 'code')
|
||||
indexes = [
|
||||
models.Index(Upper('unaccent_name'), name='%(app_label)s_region_name_idx'),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return '%s %s' % (self.code, self.name)
|
||||
|
@ -742,6 +800,9 @@ class DepartmentModel(UnaccentNameMixin, models.Model):
|
|||
class Meta:
|
||||
ordering = ['code']
|
||||
unique_together = ('resource', 'code')
|
||||
indexes = [
|
||||
models.Index(Upper('unaccent_name'), name='%(app_label)s_dept_name_idx'),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return '%s %s' % (self.code, self.name)
|
||||
|
@ -765,7 +826,9 @@ class CityModel(UnaccentNameMixin, models.Model):
|
|||
'id': '%s.%s' % (self.code, self.zipcode),
|
||||
'code': self.code,
|
||||
'name': self.name,
|
||||
'city': self.name,
|
||||
'zipcode': self.zipcode,
|
||||
'postcode': self.zipcode,
|
||||
'population': self.population,
|
||||
'department_code': self.department.code if self.department else None,
|
||||
'department_name': self.department.name if self.department else None,
|
||||
|
@ -777,6 +840,11 @@ class CityModel(UnaccentNameMixin, models.Model):
|
|||
class Meta:
|
||||
ordering = ['-population', 'zipcode', 'unaccent_name', 'name']
|
||||
unique_together = ('resource', 'code', 'zipcode')
|
||||
indexes = [
|
||||
models.Index(fields=['code']),
|
||||
models.Index(fields=['zipcode']),
|
||||
models.Index(Upper('unaccent_name'), name='%(app_label)s_city_name_idx'),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return '%s %s' % (self.zipcode, self.name)
|
||||
|
|
|
@ -68,7 +68,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['post'],
|
||||
name='meeting',
|
||||
perm='can_access',
|
||||
description_post=_('Create a meeting'),
|
||||
post={
|
||||
'request_body': {
|
||||
|
@ -146,7 +145,6 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['get', 'delete'],
|
||||
name='meeting',
|
||||
perm='can_access',
|
||||
pattern=r'^(?P<guid>[0-9a-f]{32})/?$',
|
||||
example_pattern='{guid}/',
|
||||
description_post=_('Get a meeting'),
|
||||
|
@ -174,6 +172,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['get'],
|
||||
name='meeting',
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<guid>[0-9a-f]{32})/is-running/?$',
|
||||
example_pattern='{guid}/is-running/',
|
||||
description_post=_('Report if meeting is running'),
|
||||
|
@ -196,6 +195,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['get'],
|
||||
name='meeting',
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<guid>[0-9a-f]{32})/join/agent/(?P<key>[^/]*)/?$',
|
||||
example_pattern='{guid}/join/agent/',
|
||||
description_post=_('Get a meeting'),
|
||||
|
@ -223,6 +223,7 @@ class Resource(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
methods=['get'],
|
||||
name='meeting',
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<guid>[0-9a-f]{32})/join/user/(?P<key>[^/]*)/?$',
|
||||
example_pattern='{guid}/join/user/',
|
||||
description_post=_('Get a meeting'),
|
||||
|
|
|
@ -0,0 +1,47 @@
|
|||
# Generated by Django 3.2.18 on 2024-02-20 15:41
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CalDAV',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'dav_url',
|
||||
models.URLField(
|
||||
help_text='DAV root URL (such as https://test.egw/groupdav.php/)',
|
||||
verbose_name='DAV root URL',
|
||||
),
|
||||
),
|
||||
('dav_login', models.CharField(max_length=128, verbose_name='DAV username')),
|
||||
('dav_password', models.CharField(max_length=512, verbose_name='DAV password')),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_caldav_caldav_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'CalDAV',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -0,0 +1,367 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2024 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import functools
|
||||
import urllib.parse
|
||||
|
||||
import caldav
|
||||
import requests
|
||||
from django.db import models
|
||||
from django.utils.dateparse import parse_date, parse_datetime
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.conversion import exception_to_text
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
EVENT_SCHEMA_PART = {
|
||||
'type': 'object',
|
||||
'description': _('Ical event properties ( VEVENT RFC 5545 3.6.1 )'),
|
||||
'properties': {
|
||||
'DTSTART': {
|
||||
'type': 'string',
|
||||
'description': _('Event start (included) ISO-8601 date-time or date (for allday event)'),
|
||||
},
|
||||
'DTEND': {
|
||||
'type': 'string',
|
||||
'description': _('Event end (excluded) ISO-8601 date-time or date (for allday event)'),
|
||||
},
|
||||
'SUMMARY': {
|
||||
'type': 'string',
|
||||
'description': 'RFC 5545 3.8.1.12',
|
||||
},
|
||||
'DESCRIPTION': {
|
||||
'type': 'string',
|
||||
'description': 'RFC 5545 3.8.2.5',
|
||||
},
|
||||
'LOCATION': {
|
||||
'type': 'string',
|
||||
'description': 'RFC 5545 3.8.1.7',
|
||||
},
|
||||
'CATEGORY': {'type': 'string'},
|
||||
'TRANSP': {
|
||||
'type': 'boolean',
|
||||
'description': _('Transparent if true else opaque (RFC 5545 3.8.2.7)'),
|
||||
},
|
||||
'RRULE': {
|
||||
'description': _('Recurrence rule (RFC 5545 3.8.5.3)'),
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'FREQ': {
|
||||
'type': 'string',
|
||||
'enum': ['WEEKLY', 'MONTHLY', 'YEARLY'],
|
||||
},
|
||||
'BYDAY': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'string',
|
||||
'enum': ['MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU'],
|
||||
},
|
||||
},
|
||||
'BYMONTH': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'integer',
|
||||
'minimum': 1,
|
||||
'maximum': 12,
|
||||
},
|
||||
},
|
||||
'COUNT': {
|
||||
'type': 'integer',
|
||||
'minimum': 1,
|
||||
},
|
||||
'UNTIL': {
|
||||
'type': 'string',
|
||||
'description': _('Date or date and time indicating the end of recurrence'),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
USERNAME_PARAM = {
|
||||
'description': _('The calendar\'s owner username'),
|
||||
'type': 'string',
|
||||
}
|
||||
|
||||
EVENT_UID_PARAM = {
|
||||
'description': _('An event UID'),
|
||||
'type': 'string',
|
||||
}
|
||||
|
||||
|
||||
# Action's request body schema
|
||||
EVENT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': _('Event description schema'),
|
||||
'unflatten': True,
|
||||
**EVENT_SCHEMA_PART,
|
||||
}
|
||||
|
||||
|
||||
def clean_egw_response(response, *args, **kwargs):
|
||||
'''requests hooks that modify requests's responses deleting
|
||||
EGW's SQL log lines when there is some
|
||||
|
||||
SQL log lines are matched by checking that they :
|
||||
- startswith "==> SQL =>"
|
||||
- endswith "<br>"
|
||||
'''
|
||||
response._content = b'\n'.join(
|
||||
line
|
||||
for line in response.content.split(b'\n')
|
||||
if not line.startswith(b'==> SQL =>') or not line.endswith(b'<br>')
|
||||
)
|
||||
return response
|
||||
|
||||
|
||||
class CalDAV(BaseResource):
|
||||
dav_url = models.URLField(
|
||||
blank=False,
|
||||
verbose_name=_('DAV root URL'),
|
||||
help_text=_('DAV root URL (such as https://test.egw/groupdav.php/)'),
|
||||
)
|
||||
dav_login = models.CharField(max_length=128, verbose_name=_('DAV username'), blank=False)
|
||||
dav_password = models.CharField(max_length=512, verbose_name=_('DAV password'), blank=False)
|
||||
|
||||
category = _('Misc')
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('CalDAV')
|
||||
|
||||
@functools.cached_property
|
||||
def dav_client(self):
|
||||
'''Instanciate a caldav.DAVCLient and return the instance'''
|
||||
client = caldav.DAVClient(self.dav_url, username=self.dav_login, password=self.dav_password)
|
||||
# Replace DAVClient.session requests.Session instance by our
|
||||
# own requests session in order to log DAV interactions
|
||||
client.session = self.requests
|
||||
# adds EGW response cleaning hook
|
||||
self.requests.hooks['response'] = clean_egw_response
|
||||
return client
|
||||
|
||||
def check_status(self):
|
||||
'''Attempt a propfind on DAV root URL'''
|
||||
try:
|
||||
rep = self.dav_client.propfind()
|
||||
rep.find_objects_and_props()
|
||||
except caldav.lib.error.AuthorizationError:
|
||||
raise Exception(_('Not authorized: bad login/password ?'))
|
||||
|
||||
@endpoint(
|
||||
name='event',
|
||||
pattern='^create$',
|
||||
example_pattern='create',
|
||||
methods=['post'],
|
||||
post={'request_body': {'schema': {'application/json': EVENT_SCHEMA}}},
|
||||
parameters={
|
||||
'username': USERNAME_PARAM,
|
||||
},
|
||||
)
|
||||
def create_event(self, request, username, post_data):
|
||||
'''Event creation endpoint'''
|
||||
cal = self.get_calendar(username)
|
||||
self._process_event_properties(post_data)
|
||||
|
||||
# Sequence is auto-incremented when saved, -1 will lead to the
|
||||
# expected SEQUENCE:0 when an event is created
|
||||
post_data['SEQUENCE'] = -1
|
||||
try:
|
||||
evt = cal.save_event(**post_data)
|
||||
except requests.exceptions.RequestException as expt:
|
||||
raise APIError(
|
||||
_('Error sending creation request to caldav server'),
|
||||
data={
|
||||
'expt_class': str(type(expt)),
|
||||
'expt': str(expt),
|
||||
'username': username,
|
||||
},
|
||||
)
|
||||
except caldav.lib.error.DAVError as expt:
|
||||
raise APIError(
|
||||
_('Error creating event'),
|
||||
data={'expt_class': str(type(expt)), 'expt': exception_to_text(expt), 'username': username},
|
||||
)
|
||||
return {'data': {'event_id': evt.id}}
|
||||
|
||||
# Patch do not support request_body validation, using post instead
|
||||
@endpoint(
|
||||
name='event',
|
||||
pattern='^update$',
|
||||
example_pattern='update',
|
||||
methods=['post'],
|
||||
post={'request_body': {'schema': {'application/json': EVENT_SCHEMA}}},
|
||||
parameters={
|
||||
'username': USERNAME_PARAM,
|
||||
'event_id': EVENT_UID_PARAM,
|
||||
},
|
||||
)
|
||||
def update_event(self, request, username, event_id, post_data):
|
||||
'''Event update endpoint'''
|
||||
self._process_event_properties(post_data)
|
||||
ical = self.get_event(username, event_id)
|
||||
|
||||
vevent = ical.icalendar_instance.walk('VEVENT')
|
||||
if not len(vevent) == 1:
|
||||
raise APIError(
|
||||
_('Given event (user:%r uid:%r) do not contains VEVENT component') % (username, event_id),
|
||||
data={
|
||||
'username': username,
|
||||
'event_uid': event_id,
|
||||
'VEVENT': str(vevent),
|
||||
},
|
||||
)
|
||||
vevent = vevent[0]
|
||||
# vevent.update(post_data) do not convert values as expected
|
||||
for k, v in post_data.items():
|
||||
vevent.pop(k)
|
||||
vevent.add(k, v)
|
||||
if 'SEQUENCE' not in vevent:
|
||||
# SEQUENCE is auto-incremented when present
|
||||
# here after a 1st modification the SEQUENCE will be 1 (not 0)
|
||||
vevent['SEQUENCE'] = 0
|
||||
try:
|
||||
# do not use ical.save(no_create=True) : no_create fails on some calDAV
|
||||
ical.save()
|
||||
except requests.exceptions.RequestException as expt:
|
||||
raise APIError(
|
||||
_('Error sending update request to caldav server'),
|
||||
data={
|
||||
'expt_class': str(type(expt)),
|
||||
'expt': str(expt),
|
||||
'username': username,
|
||||
'event_id': event_id,
|
||||
},
|
||||
)
|
||||
return {'data': {'event_id': ical.id}}
|
||||
|
||||
@endpoint(
|
||||
name='event',
|
||||
pattern='^delete$',
|
||||
example_pattern='delete',
|
||||
methods=['delete'],
|
||||
parameters={
|
||||
'username': USERNAME_PARAM,
|
||||
'event_id': EVENT_UID_PARAM,
|
||||
},
|
||||
)
|
||||
def delete_event(self, request, username, event_id):
|
||||
ical = self.get_event(username, event_id)
|
||||
try:
|
||||
ical.delete()
|
||||
except requests.exceptions.RequestException as expt:
|
||||
raise APIError(
|
||||
_('Error sending deletion request to caldav server'),
|
||||
data={
|
||||
'expt_class': str(type(expt)),
|
||||
'expt': str(expt),
|
||||
'username': username,
|
||||
'event_id': event_id,
|
||||
},
|
||||
)
|
||||
return {}
|
||||
|
||||
def get_event(self, username, event_uid):
|
||||
'''Fetch an event given a username and an event_uid
|
||||
Arguments:
|
||||
- username: Calendar owner's username
|
||||
- event_uid: The event's UID
|
||||
|
||||
Returns an caldav.Event instance
|
||||
'''
|
||||
event_path = '%s/calendar/%s.ics' % (urllib.parse.quote(username), urllib.parse.quote(str(event_uid)))
|
||||
cal = self.get_calendar(username)
|
||||
try:
|
||||
ical = cal.event_by_url(event_path)
|
||||
except caldav.lib.error.DAVError as expt:
|
||||
raise APIError(
|
||||
_('Unable to get event %r in calendar owned by %r') % (event_uid, username),
|
||||
data={
|
||||
'expt': exception_to_text(expt),
|
||||
'expt_cls': str(type(expt)),
|
||||
'username': username,
|
||||
'event_uid': event_uid,
|
||||
},
|
||||
)
|
||||
except requests.exceptions.RequestException as expt:
|
||||
raise APIError(
|
||||
_('Unable to communicate with caldav server while fetching event'),
|
||||
data={
|
||||
'expt': exception_to_text(expt),
|
||||
'expt_class': str(type(expt)),
|
||||
'username': username,
|
||||
'event_uid': event_uid,
|
||||
},
|
||||
)
|
||||
return ical
|
||||
|
||||
def get_calendar(self, username):
|
||||
'''Given a username returns the associated calendar set
|
||||
Arguments:
|
||||
- username: Calendar owner's username
|
||||
|
||||
Returns A caldav.Calendar instance
|
||||
Note: do not raise any caldav exception before a method trying to make
|
||||
a request is called
|
||||
'''
|
||||
path = '%s/calendar' % urllib.parse.quote(username)
|
||||
calendar = caldav.Calendar(client=self.dav_client, url=path)
|
||||
return calendar
|
||||
|
||||
def _process_event_properties(self, data):
|
||||
'''Handles verification & convertion of event properties
|
||||
@note Modify given data dict inplace
|
||||
'''
|
||||
if 'TRANSP' in data:
|
||||
data['TRANSP'] = 'TRANSPARENT' if data['TRANSP'] else 'OPAQUE'
|
||||
|
||||
if 'CATEGORY' in data:
|
||||
data['CATEGORIES'] = [data.pop('CATEGORY')]
|
||||
|
||||
if 'RRULE' in data and 'UNTIL' in data['RRULE']:
|
||||
try:
|
||||
data['RRULE']['UNTIL'] = self._parse_date_or_datetime(data['RRULE']['UNTIL'])
|
||||
except ValueError:
|
||||
raise APIError(
|
||||
_('Unable to convert field %(name)s=%(value)r: not a valid date nor date-time')
|
||||
% {'name': 'RRULE/UNTIL', 'value': data['RRULE']['UNTIL']},
|
||||
http_status=400,
|
||||
)
|
||||
|
||||
for dt_field in ('DTSTART', 'DTEND'):
|
||||
if dt_field not in data:
|
||||
continue
|
||||
try:
|
||||
data[dt_field] = self._parse_date_or_datetime(data[dt_field])
|
||||
except ValueError:
|
||||
raise APIError(
|
||||
_('Unable to convert field %(name)s=%(value)r: not a valid date nor date-time')
|
||||
% {'name': dt_field, 'value': data[dt_field]},
|
||||
http_status=400,
|
||||
)
|
||||
|
||||
def _parse_date_or_datetime(self, value):
|
||||
try:
|
||||
ret = parse_date(value) or parse_datetime(value)
|
||||
except ValueError:
|
||||
ret = None
|
||||
if not ret:
|
||||
raise ValueError('Invalid value')
|
||||
return ret
|
|
@ -0,0 +1,87 @@
|
|||
# Generated by Django 3.2.18 on 2024-02-28 09:13
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Carl',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'basic_auth_username',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication username'
|
||||
),
|
||||
),
|
||||
(
|
||||
'basic_auth_password',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication password'
|
||||
),
|
||||
),
|
||||
(
|
||||
'client_certificate',
|
||||
models.FileField(
|
||||
blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
|
||||
),
|
||||
),
|
||||
(
|
||||
'trusted_certificate_authorities',
|
||||
models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'),
|
||||
),
|
||||
(
|
||||
'verify_cert',
|
||||
models.BooleanField(blank=True, default=True, verbose_name='TLS verify certificates'),
|
||||
),
|
||||
(
|
||||
'http_proxy',
|
||||
models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
|
||||
),
|
||||
(
|
||||
'service_url',
|
||||
models.URLField(
|
||||
help_text='Base webservice URL (such as https://carlsource.server.com/gmaoCS02/',
|
||||
verbose_name='Service URL',
|
||||
),
|
||||
),
|
||||
(
|
||||
'carl_username',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Carl token authentication username'
|
||||
),
|
||||
),
|
||||
(
|
||||
'carl_password',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Carl token authentication password'
|
||||
),
|
||||
),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_carl_carl_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Carl',
|
||||
},
|
||||
),
|
||||
]
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue