Compare commits
651 Commits
wip/74256-
...
main
Author | SHA1 | Date |
---|---|---|
Emmanuel Cazenave | 733cc1104f | |
Emmanuel Cazenave | 022d3f2f37 | |
Emmanuel Cazenave | 9d8ed0ae1d | |
Frédéric Péters | 35201c953f | |
Corentin Sechet | 48eeaf01e4 | |
Corentin Sechet | a3862e80fe | |
Frédéric Péters | 09632084c0 | |
Frédéric Péters | feb6fd1428 | |
Frédéric Péters | db9740da15 | |
Emmanuel Cazenave | 38d62092ef | |
Emmanuel Cazenave | c25a371c10 | |
Benjamin Dauvergne | c10b2327dd | |
Corentin Sechet | 3538d06f6f | |
Nicolas Roche | abafd19553 | |
Nicolas Roche | a565716db2 | |
Nicolas Roche | d4bb8059e0 | |
Benjamin Dauvergne | 10590eb9d9 | |
Yann Weber | 23a59e3f0f | |
Yann Weber | 71d40f1230 | |
Yann Weber | d740651fcd | |
Yann Weber | 1fa2c0f9a7 | |
Corentin Sechet | 96c1e49e23 | |
Thomas NOËL | 3729c4605d | |
Yann Weber | 923125e786 | |
Valentin Deniaud | 4a243f72e5 | |
Yann Weber | b5ccfb60ff | |
Yann Weber | a36a0f9e9c | |
Yann Weber | fd7a2d487d | |
Yann Weber | 1e13344dc0 | |
Yann Weber | ed4a4b629e | |
Yann Weber | 82e4d424c7 | |
Serghei Mihai | 9ccf11fbd2 | |
Yann Weber | 764a1997f9 | |
Yann Weber | 04a9840744 | |
Yann Weber | 7b58db7ea7 | |
Yann Weber | 8986d35915 | |
Nicolas Roche | 5d26332646 | |
Yann Weber | 0f75026c9a | |
Yann Weber | f32d06b474 | |
Yann Weber | 08b82d398d | |
Yann Weber | 5e3a2d23d9 | |
Yann Weber | c729600cfc | |
Yann Weber | 665c16bca2 | |
Nicolas Roche | 5f86102711 | |
Corentin Sechet | 47f925d62c | |
Corentin Sechet | 6369875b12 | |
Corentin Sechet | 7330fce543 | |
Corentin Sechet | 6fc97253ff | |
Corentin Sechet | 631fd54f30 | |
Corentin Sechet | 8e7056f4de | |
Corentin Sechet | 99662694e6 | |
Frédéric Péters | f7f848af82 | |
Emmanuel Cazenave | b35b0fccd3 | |
Emmanuel Cazenave | 1570ba1f3f | |
Emmanuel Cazenave | ea1a5a87d6 | |
Corentin Sechet | 96f992e11d | |
Nicolas Roche | fd91664b1e | |
Corentin Sechet | 3f584e13a7 | |
Benjamin Dauvergne | 668ddf08e5 | |
Benjamin Dauvergne | b9cc850b7f | |
Benjamin Dauvergne | b2aa4c72f1 | |
Frédéric Péters | d416793d64 | |
Frédéric Péters | 51d2b4b314 | |
Corentin Sechet | c9001cdda4 | |
Corentin Sechet | 2f50e4b207 | |
Benjamin Dauvergne | 91b92aeb44 | |
Benjamin Dauvergne | 628b38fe5f | |
Benjamin Dauvergne | 8d6f202b16 | |
Benjamin Dauvergne | 68764cd9c2 | |
Benjamin Dauvergne | 33b39c52f1 | |
Corentin Sechet | 0fa387d6d0 | |
Corentin Sechet | 9ed59bc94d | |
Benjamin Dauvergne | 7c9e487482 | |
Corentin Sechet | 2842439ce1 | |
Corentin Sechet | 4738850fcc | |
Serghei Mihai | 38d3fbbf4e | |
Corentin Sechet | 36dfa9508e | |
Serghei Mihai | cd0f441d3b | |
Benjamin Dauvergne | 97e77b55c9 | |
Nicolas Roche | c3b5707ea6 | |
Benjamin Dauvergne | d05217cdc0 | |
Benjamin Dauvergne | 5b2f2d5b97 | |
Benjamin Dauvergne | 4465d64903 | |
Benjamin Dauvergne | 0b4de669fd | |
Benjamin Dauvergne | a15a11ec4a | |
Benjamin Dauvergne | 9a487dde91 | |
Nicolas Roche | 9a158a66d1 | |
Benjamin Dauvergne | a1d4c44ac4 | |
Benjamin Dauvergne | f2b64b6ebf | |
Benjamin Dauvergne | 45f6ee9e8d | |
Benjamin Dauvergne | 3c30a76f3e | |
Benjamin Dauvergne | 3ca8e98485 | |
Emmanuel Cazenave | 8f21df1dc7 | |
Emmanuel Cazenave | 4258520491 | |
Emmanuel Cazenave | 0f1117f483 | |
Nicolas Roche | eac67cb852 | |
Nicolas Roche | f841e98049 | |
Nicolas Roche | ad543177f1 | |
Nicolas Roche | 1a30653192 | |
Nicolas Roche | 33c756e76a | |
Benjamin Dauvergne | 94e60a35b2 | |
Benjamin Dauvergne | 3ad6c89068 | |
Benjamin Dauvergne | df0084d202 | |
Nicolas Roche | 13d30a8049 | |
Nicolas Roche | b5aa2bb2b2 | |
Nicolas Roche | 509ae33314 | |
Nicolas Roche | c3557f628a | |
Nicolas Roche | 93c6224357 | |
Nicolas Roche | e2e591afeb | |
Nicolas Roche | 94057f7a0f | |
Nicolas Roche | 86ba7d6a55 | |
Frédéric Péters | 129e2a5af3 | |
Frédéric Péters | 1c7f6c2557 | |
Nicolas Roche | 8459ef11a3 | |
Nicolas Roche | 8c9edcd332 | |
Lauréline Guérin | f69f6281ab | |
Nicolas Roche | 72b3315ae4 | |
Nicolas Roche | 08f347edf3 | |
Nicolas Roche | 28d2ea1ba9 | |
Frédéric Péters | 9c695acf63 | |
Benjamin Dauvergne | bbf7aabb30 | |
Benjamin Dauvergne | ee1baf8b50 | |
Benjamin Dauvergne | 37536df9d1 | |
Thomas NOËL | 514c3f1995 | |
Thomas NOËL | dd87030c50 | |
Nicolas Roche | 104f96b61c | |
Thomas NOËL | 6f3516297e | |
Thomas NOËL | 43e2d9222a | |
Thomas NOËL | d0ecf8af77 | |
Thomas NOËL | 31a0828d6d | |
Thomas NOËL | 020a402a96 | |
Nicolas Roche | 0010095146 | |
Corentin Sechet | b377b87d5d | |
Thomas NOËL | 52886c216c | |
Thomas NOËL | b8fc9716a4 | |
Thomas NOËL | 40c3c6affb | |
Nicolas Roche | ec8dd0a43c | |
Nicolas Roche | 5ad847df95 | |
Nicolas Roche | 8e6f61ceb7 | |
Nicolas Roche | 13fe6411eb | |
Thomas NOËL | 6101988404 | |
Serghei Mihai | 8b3b8edfda | |
Pierre Ducroquet | 52981183ff | |
Benjamin Dauvergne | afab9d49a1 | |
Nicolas Roche | e592c33021 | |
Nicolas Roche | ad4b9de490 | |
Benjamin Dauvergne | b4d637249a | |
Benjamin Dauvergne | f15d802b11 | |
Emmanuel Cazenave | 2e167a3466 | |
Frédéric Péters | f513f2451d | |
Frédéric Péters | b9939892b8 | |
Benjamin Dauvergne | 7de7cd8b3f | |
Benjamin Dauvergne | c247197c6e | |
Benjamin Dauvergne | bfd1fcc2f6 | |
Nicolas Roche | 905e3b141f | |
Nicolas Roche | e8122d29eb | |
Serghei Mihai | 4c5204bd2f | |
Corentin Sechet | 07619bc012 | |
Corentin Sechet | b516b7b66c | |
Corentin Sechet | 92768f5852 | |
Corentin Sechet | d69e4df328 | |
Corentin Sechet | 2711f5c615 | |
Benjamin Dauvergne | cd08a2068c | |
Benjamin Dauvergne | 66e99362ef | |
Benjamin Dauvergne | 00443f8629 | |
Corentin Sechet | 230d424571 | |
Corentin Sechet | a8e2223c50 | |
Corentin Sechet | 7951510aa1 | |
Thomas NOËL | 08fa0fad21 | |
Benjamin Dauvergne | 320013ac68 | |
Benjamin Dauvergne | 4338ee9cd7 | |
Corentin Sechet | 32d3dd01bc | |
Corentin Sechet | 7314fa224c | |
Corentin Sechet | 82e9018865 | |
Corentin Sechet | a51c49a865 | |
Corentin Sechet | 1e12dae71b | |
Thomas NOËL | e506facfd6 | |
Thomas NOËL | 0a28034137 | |
Nicolas Roche | 81f58cad59 | |
Emmanuel Cazenave | 2a73e4dfb3 | |
Nicolas Roche | 979e531b3a | |
Nicolas Roche | 5cd1e3aacc | |
Nicolas Roche | aa9585071a | |
Nicolas Roche | 923427783c | |
Nicolas Roche | 34ac701200 | |
Nicolas Roche | 1b0c842d48 | |
Nicolas Roche | d0f4b9ecf9 | |
Nicolas Roche | a7ff9bbc4a | |
Nicolas Roche | b7b50717ca | |
Nicolas Roche | 6c4fc4152d | |
Nicolas Roche | e59765eaf7 | |
Thomas NOËL | 8bb8f2c1df | |
Thomas NOËL | e2a45ea01b | |
Thomas NOËL | 11d3bd5a9b | |
Thomas NOËL | 2162e9d08d | |
Thomas NOËL | 264550e363 | |
Thomas NOËL | ba58f183ed | |
Thomas NOËL | f564e71d5d | |
Thomas NOËL | 6f7acc1489 | |
Serghei Mihai | 62c0b91ac4 | |
Emmanuel Cazenave | 2b0842eb03 | |
Emmanuel Cazenave | d315580294 | |
Serghei Mihai | 140863373f | |
Serghei Mihai | fa50ff9129 | |
Emmanuel Cazenave | 0154defcce | |
Emmanuel Cazenave | f336d7a952 | |
Thomas NOËL | c148f6ae03 | |
Nicolas Roche | bda1eba253 | |
Emmanuel Cazenave | 8892a97435 | |
Serghei Mihai | 14a6fb1aed | |
Benjamin Dauvergne | f63e250e0d | |
Serghei Mihai | 4789f1e1ff | |
Serghei Mihai | 2bbc835787 | |
Serghei Mihai | 94184d9c5e | |
Corentin Sechet | 76f3860ad2 | |
Corentin Sechet | 3d5ec0268c | |
Nicolas Roche | f2652bac36 | |
Corentin Sechet | c598673e3d | |
Corentin Sechet | fe1f40cc7d | |
Corentin Sechet | a3db9b1e35 | |
Thomas NOËL | 92f5b5f26b | |
Frédéric Péters | d6b87039cb | |
Corentin Sechet | 9d67f8587a | |
Benjamin Dauvergne | a9f2956db7 | |
Benjamin Dauvergne | 8266740b52 | |
Lauréline Guérin | 117743e0a6 | |
Nicolas Roche | 49226aca44 | |
Nicolas Roche | bc62bdc3fd | |
Nicolas Roche | 4bd7032998 | |
Nicolas Roche | e1b3ab7646 | |
Frédéric Péters | 7a671f7e74 | |
Nicolas Roche | 441ac49c58 | |
Nicolas Roche | bac28e933c | |
Benjamin Dauvergne | b497988bf5 | |
Emmanuel Cazenave | 898a14f821 | |
Emmanuel Cazenave | ef0b518aba | |
Emmanuel Cazenave | bf2610b4c5 | |
Paul Marillonnet | c56c0676de | |
Corentin Sechet | 649c1c05a8 | |
Benjamin Dauvergne | a192a953b9 | |
Benjamin Dauvergne | faf3e4692e | |
Nicolas Roche | 60bcc9d82e | |
Corentin Sechet | 0d6e180fef | |
Corentin Sechet | 02300e612e | |
Corentin Sechet | 520e6a818b | |
Corentin Sechet | 199075ed80 | |
Corentin Sechet | 5fe9b9a1e6 | |
Corentin Sechet | aed4c44107 | |
Corentin Sechet | 62e452c31e | |
Nicolas Roche | fd1c591ab3 | |
Nicolas Roche | 40287181cc | |
Serghei Mihai | 4ccaad6d35 | |
Nicolas Roche | 96b0777324 | |
Serghei Mihai | 9e64fa5c9b | |
Serghei Mihai | 2916fb7c32 | |
Emmanuel Cazenave | e549db488f | |
Emmanuel Cazenave | 16fc487119 | |
Corentin Sechet | 9d78d8fcf3 | |
Corentin Sechet | 8cdf3dcae2 | |
Nicolas Roche | 659ba18a00 | |
Nicolas Roche | 7cbd27afd3 | |
Nicolas Roche | 1a37984298 | |
Nicolas Roche | a4805681a1 | |
Nicolas Roche | eacfb506d6 | |
Frédéric Péters | 306cba2423 | |
Valentin Deniaud | bcfc02d94a | |
Valentin Deniaud | 40142de8d2 | |
Valentin Deniaud | 6e7ac8c145 | |
Lauréline Guérin | 0a46addb73 | |
Frédéric Péters | a19073c7e9 | |
Frédéric Péters | 7178f7c4d0 | |
Nicolas Roche | fac61176a3 | |
Lauréline Guérin | 4f136ee898 | |
Nicolas Roche | 06f22a03f8 | |
Frédéric Péters | ab2f8a847b | |
Thomas NOËL | c38ee2913c | |
Corentin Sechet | 100064eba8 | |
Corentin Sechet | 5157fde445 | |
Serghei Mihai | 8fa7d79b1e | |
Serghei Mihai | c83228e375 | |
Nicolas Roche | 3cee8e4350 | |
Lauréline Guérin | 2775202bd8 | |
Nicolas Roche | 64b25c7c73 | |
Nicolas Roche | a8bab7fa01 | |
Nicolas Roche | 16fd6aae41 | |
Nicolas Roche | 56e2a4b1d9 | |
Nicolas Roche | 122a0f6c22 | |
Nicolas Roche | 569159a95f | |
Nicolas Roche | 2b0612d5ef | |
Nicolas Roche | 25420ca260 | |
Nicolas Roche | bd388e42a6 | |
Valentin Deniaud | 4ce087b6ef | |
Valentin Deniaud | 75db45cdfa | |
Valentin Deniaud | 5cda735517 | |
Serghei Mihai | ccb53be16e | |
Serghei Mihai | 9626f03f34 | |
Emmanuel Cazenave | de3d69e2d2 | |
Emmanuel Cazenave | 84f1b2e728 | |
Emmanuel Cazenave | d199eb9de7 | |
Nicolas Roche | 9386398863 | |
Lauréline Guérin | f928a10fc5 | |
Nicolas Roche | 0616f216bf | |
Benjamin Dauvergne | 5d05b38653 | |
Thomas NOËL | 1afe1a8649 | |
Emmanuel Cazenave | 34e0b6f8d8 | |
Emmanuel Cazenave | a16dc0c83a | |
Lauréline Guérin | de81517bb4 | |
Emmanuel Cazenave | 5ce8d34fa5 | |
Nicolas Roche | 0b8730b9ba | |
Emmanuel Cazenave | 60d2277b55 | |
Emmanuel Cazenave | faebc78066 | |
Emmanuel Cazenave | 7212c9056d | |
Nicolas Roche | 0e07b8fca7 | |
Frédéric Péters | f498f8f32a | |
Frédéric Péters | d59eaa8ab3 | |
Frédéric Péters | 85f6e24aab | |
Nicolas Roche | 9dee19e493 | |
Nicolas Roche | 6238b21727 | |
Nicolas Roche | 451cf508ce | |
Nicolas Roche | 5a1046c7d2 | |
Nicolas Roche | 5e94eb86a3 | |
Nicolas Roche | ad752230d6 | |
Nicolas Roche | 16ab7c0a77 | |
Nicolas Roche | 4d58d2c80b | |
Nicolas Roche | 6147e497bc | |
Nicolas Roche | c22981630f | |
Nicolas Roche | 16c3bbd120 | |
Nicolas Roche | 37fcc2d65a | |
Nicolas Roche | 1257eea8d2 | |
Nicolas Roche | f5dc0f4fb2 | |
Nicolas Roche | 3946028e53 | |
Nicolas Roche | 998e1c1208 | |
Nicolas Roche | aa99a0d826 | |
Nicolas Roche | e2047aa318 | |
Nicolas Roche | 6546c7ac63 | |
Nicolas Roche | 2083b70610 | |
Nicolas Roche | a15c80765b | |
Nicolas Roche | bcbe12679e | |
Nicolas Roche | ae5681b0e7 | |
Nicolas Roche | 03cbdf578f | |
Nicolas Roche | e873bfaaa8 | |
Nicolas Roche | 59989d562b | |
Nicolas Roche | aa250342da | |
Nicolas Roche | 59955c60db | |
Nicolas Roche | 72b3cc8a87 | |
Nicolas Roche | ea1c2b34bb | |
Lauréline Guérin | 95ba4d5f0e | |
Lauréline Guérin | fb01b9a9ec | |
Lauréline Guérin | 468e5309a9 | |
Thomas NOËL | 002af7c243 | |
Thomas NOËL | f7739d1aa2 | |
Nicolas Roche | 4e5ec54b26 | |
Nicolas Roche | 61308407de | |
Nicolas Roche | ec1c4886fd | |
Frédéric Péters | 2618463abb | |
Nicolas Roche | c8fb63fe3e | |
Thomas NOËL | 7a21a3e50c | |
Emmanuel Cazenave | d479819f50 | |
Emmanuel Cazenave | 415e9f8a9e | |
Serghei Mihai | 166d58591a | |
Nicolas Roche | 2cac256517 | |
Benjamin Dauvergne | 62ed945d62 | |
Benjamin Dauvergne | dcb772fdbd | |
Benjamin Dauvergne | 7959ec9a3c | |
Frédéric Péters | 65d3f390f3 | |
Emmanuel Cazenave | 65409f2070 | |
Nicolas Roche | 3f69bdb447 | |
Nicolas Roche | 9d0fc45957 | |
Nicolas Roche | 29ce646989 | |
Frédéric Péters | 9a892a0e77 | |
Frédéric Péters | e4a9d16719 | |
Corentin Sechet | 29b8775a16 | |
Nicolas Roche | d176d9fc4b | |
Nicolas Roche | 8df0c9ec11 | |
Nicolas Roche | d21669a250 | |
Emmanuel Cazenave | b74e848dbd | |
Emmanuel Cazenave | 0c06086585 | |
Emmanuel Cazenave | 6b74e9a632 | |
Emmanuel Cazenave | 7102c3150a | |
Frédéric Péters | 9ff69633a3 | |
Frédéric Péters | 6194728fb3 | |
Frédéric Péters | 1ab81c200b | |
Thomas NOËL | 2277fcdd23 | |
Corentin Sechet | 54dbbc3148 | |
Frédéric Péters | 6b432122d3 | |
Thomas NOËL | 3ba866a275 | |
Thomas Jund | 3934030677 | |
Corentin Sechet | e2ce17f701 | |
Corentin Sechet | 7395fa5560 | |
Nicolas Roche | 816da0f6b6 | |
Nicolas Roche | be4a65b6be | |
Nicolas Roche | 347d09db89 | |
Nicolas Roche | 0276de78c2 | |
Nicolas Roche | 80fc536e2b | |
Nicolas Roche | 3dab63a0f3 | |
Nicolas Roche | bd9270a8ad | |
Emmanuel Cazenave | 31efc19163 | |
Lauréline Guérin | 9a4f57612e | |
Lauréline Guérin | e0ed5cc1c9 | |
Emmanuel Cazenave | ab46f17856 | |
Nicolas Roche | 0b81087341 | |
Nicolas Roche | f71891abd7 | |
Nicolas Roche | 731148917e | |
Nicolas Roche | c413b5738f | |
Emmanuel Cazenave | fd09fb2fd7 | |
Benjamin Dauvergne | 84cd51957e | |
Nicolas Roche | 8e215185ec | |
Nicolas Roche | 4cdfeb47b0 | |
Nicolas Roche | 896391c718 | |
Nicolas Roche | 86ac566bbb | |
Nicolas Roche | 135cdbf46a | |
Thomas NOËL | d87dd7b107 | |
Frédéric Péters | 23480ce819 | |
Frédéric Péters | 1bc79d7312 | |
Nicolas Roche | bdd68dc6e8 | |
Serghei Mihai | f3a7f7f460 | |
Serghei Mihai | f704763565 | |
Emmanuel Cazenave | 822a0d83b4 | |
Benjamin Dauvergne | 0a6733f070 | |
Benjamin Dauvergne | 4d89c476bb | |
Benjamin Dauvergne | 96413bd5d9 | |
Benjamin Dauvergne | 7defa59ccc | |
Benjamin Dauvergne | 9f5927daa5 | |
Benjamin Dauvergne | a20835e118 | |
Nicolas Roche | fc9444cd98 | |
Nicolas Roche | 6f562d6e10 | |
Nicolas Roche | acd0ba843c | |
Nicolas Roche | 5662aa069d | |
Nicolas Roche | 06b640731f | |
Nicolas Roche | 5243e328a4 | |
Nicolas Roche | bbc8e1cb5b | |
Nicolas Roche | 1be01a198e | |
Nicolas Roche | c7d33287c5 | |
Nicolas Roche | 483268c636 | |
Nicolas Roche | be6d3df42e | |
Nicolas Roche | db11bfcbb7 | |
Nicolas Roche | 1d21ac784c | |
Nicolas Roche | 87f982e8a4 | |
Nicolas Roche | 18825c057e | |
Nicolas Roche | 36940933d9 | |
Nicolas Roche | 9535c6c68f | |
Nicolas Roche | 2d208a9b96 | |
Nicolas Roche | d4c0214ac7 | |
Nicolas Roche | 87b97a417b | |
Frédéric Péters | 8e095b9dfa | |
Thomas NOËL | a75835584b | |
Paul Marillonnet | 6d55216e43 | |
Corentin Sechet | b3dfbd17df | |
Emmanuel Cazenave | 8146b2f0c8 | |
Emmanuel Cazenave | e4cf2d8ecf | |
Benjamin Dauvergne | 5314e91ace | |
Benjamin Dauvergne | 81697bed97 | |
Nicolas Roche | e852d7e2f3 | |
Nicolas Roche | f8124d9ece | |
Nicolas Roche | 56d14bd440 | |
Nicolas Roche | 10224eebbf | |
Nicolas Roche | c9edd27570 | |
Nicolas Roche | 80100d9d58 | |
Nicolas Roche | ba7cf207b7 | |
Nicolas Roche | 75e17ba29a | |
Frédéric Péters | c76fff6f23 | |
Frédéric Péters | 8b88aefe2a | |
Nicolas Roche | 0ed60d380d | |
Nicolas Roche | c695b51d85 | |
Nicolas Roche | a3b515b15c | |
Nicolas Roche | 29f4774d2b | |
Nicolas Roche | 324b787f33 | |
Nicolas Roche | 2c335f2ed1 | |
Nicolas Roche | 425ca113f9 | |
Nicolas Roche | a7af856e32 | |
Nicolas Roche | 894edc3097 | |
Nicolas Roche | bde51d325b | |
Frédéric Péters | 9dbab1fbc9 | |
Frédéric Péters | 9b2d811750 | |
Benjamin Dauvergne | 9984f6bb66 | |
Benjamin Dauvergne | 47984a11c2 | |
Frédéric Péters | 43847c831d | |
Frédéric Péters | eb93e78478 | |
Frédéric Péters | 89cbcd77b3 | |
Thomas NOËL | 8a9e0ff5ea | |
Nicolas Roche | 32710aaa46 | |
Frédéric Péters | ea3d48b7da | |
Frédéric Péters | 98a0cd8470 | |
Frédéric Péters | ff4313d4eb | |
Frédéric Péters | 1e876d9be3 | |
Frédéric Péters | 4ab1854bc9 | |
Frédéric Péters | fee1c505c6 | |
Frédéric Péters | 42d08159db | |
Frédéric Péters | d4d5bcbf4c | |
Frédéric Péters | 4d80269eba | |
Frédéric Péters | 9d548e8c9f | |
Frédéric Péters | 405c606a1d | |
Frédéric Péters | cb8ee79420 | |
Frédéric Péters | 0d2623dde5 | |
Frédéric Péters | a8db4eb9f2 | |
Nicolas Roche | 86ffca3a6a | |
Nicolas Roche | 3d99dd40d5 | |
Nicolas Roche | e4089b33a0 | |
Nicolas Roche | bb5e12809f | |
Nicolas Roche | 332ad30f1b | |
Nicolas Roche | b5a19ee8b7 | |
Nicolas Roche | 75de5eb36e | |
Nicolas Roche | bd7b1566f2 | |
Nicolas Roche | 3226fd0cf5 | |
Nicolas Roche | b425a2ac80 | |
Nicolas Roche | 5b53826063 | |
Nicolas Roche | 34aaf879cb | |
Nicolas Roche | d1fea2f13f | |
Frédéric Péters | ac4842c64d | |
Nicolas Roche | 289f7701b9 | |
Nicolas Roche | c4094d5933 | |
Nicolas Roche | 4b6ceac39e | |
Thomas NOËL | 5cb6187702 | |
Thomas NOËL | 2ac7a1c99d | |
Thomas NOËL | d0c271ac41 | |
Frédéric Péters | 8f707e90cb | |
Serghei Mihai | aa24d04fbb | |
Nicolas Roche | 6a3d1a1866 | |
Nicolas Roche | 5fd5e84900 | |
Nicolas Roche | c28b2ccb10 | |
Frédéric Péters | b0466e9468 | |
Frédéric Péters | 906adb0e5d | |
Nicolas Roche | d9b53e1ccc | |
Nicolas Roche | c9dd0f86f9 | |
Nicolas Roche | 3f81f5efa7 | |
Nicolas Roche | f0be7e0aba | |
Nicolas Roche | 50c725d393 | |
Nicolas Roche | 3a558b0ba4 | |
Nicolas Roche | 685014df6b | |
Nicolas Roche | 791b341b83 | |
Nicolas Roche | b78f698616 | |
Nicolas Roche | 0c8f408266 | |
Nicolas Roche | ac4eef819e | |
Nicolas Roche | 54783a392f | |
Nicolas Roche | 6ba66d06ff | |
Valentin Deniaud | 39c0fab26b | |
Nicolas Roche | c54f56633a | |
Nicolas Roche | 03b4088aff | |
Nicolas Roche | 1d56451789 | |
Nicolas Roche | b64b6f5233 | |
Valentin Deniaud | 4a4ecadd4e | |
Valentin Deniaud | 8c88ec414d | |
Valentin Deniaud | 3a63f02dae | |
Valentin Deniaud | 164433b269 | |
Valentin Deniaud | afcaed5061 | |
Valentin Deniaud | a134eabcd3 | |
Valentin Deniaud | 9dc1482d37 | |
Nicolas Roche | dff430d1d5 | |
Nicolas Roche | b763a34662 | |
Nicolas Roche | f2a66ff67b | |
Nicolas Roche | bd485a77df | |
Nicolas Roche | d0b5c579b9 | |
Nicolas Roche | 940979b2f3 | |
Nicolas Roche | 9198bb87ce | |
Nicolas Roche | 276794b487 | |
Nicolas Roche | 9676943a0d | |
Nicolas Roche | 6210cf3fa3 | |
Nicolas Roche | 4b64f39337 | |
Nicolas Roche | a61f858ebe | |
Nicolas Roche | f467587386 | |
Nicolas Roche | 761391c484 | |
Benjamin Dauvergne | c8c54b50cb | |
Nicolas Roche | 6322b75699 | |
Nicolas Roche | 5b043a4030 | |
Emmanuel Cazenave | d656b273c9 | |
Emmanuel Cazenave | 5a44fdcdeb | |
Emmanuel Cazenave | 255933f904 | |
Benjamin Dauvergne | 57d2bb06a9 | |
Benjamin Dauvergne | c7dcc2a510 | |
Benjamin Dauvergne | 2de6325c6d | |
Benjamin Dauvergne | 1f93f5506d | |
Benjamin Dauvergne | 9c85b556f2 | |
Benjamin Dauvergne | 2fbfeedb14 | |
Benjamin Dauvergne | 64456cde9f | |
Nicolas Roche | 1fa564c70d | |
Nicolas Roche | 57b9c49afb | |
Nicolas Roche | 2b5eb786dc | |
Nicolas Roche | a0ef9e293e | |
Nicolas Roche | 2e85f7e013 | |
Nicolas Roche | 3cddb32080 | |
Nicolas Roche | 479cfbb7be | |
Nicolas Roche | d533a91ad4 | |
Nicolas Roche | 7e66af136c | |
Thomas NOËL | 68fe75434a | |
Benjamin Dauvergne | b5995828a5 | |
Benjamin Dauvergne | f914b8542a | |
Benjamin Dauvergne | d42985797b | |
Benjamin Dauvergne | d882fc8e32 | |
Benjamin Dauvergne | 52356c9814 | |
Benjamin Dauvergne | 11dcc6c2e7 | |
Thomas NOËL | 88f787afe2 | |
Emmanuel Cazenave | 019559f0c1 | |
Thomas NOËL | 851a89f1fd | |
Emmanuel Cazenave | d6894315d4 | |
Emmanuel Cazenave | b2de4fdacb | |
Lauréline Guérin | 6804b6695f | |
Thomas NOËL | 5c88cee83e | |
Nicolas Roche | 4e4a1227c4 | |
Benjamin Dauvergne | 85cee0abe0 | |
Emmanuel Cazenave | d1583a99de | |
Lauréline Guérin | bc8479c489 | |
Thomas NOËL | b9841c55bf | |
Benjamin Dauvergne | bae30da93b | |
Benjamin Dauvergne | bc181b5531 | |
Benjamin Dauvergne | 132508a15d | |
Benjamin Dauvergne | 25344cbe88 | |
Benjamin Dauvergne | 3d181b33ad | |
Thomas NOËL | e34f385be2 | |
Benjamin Dauvergne | 24a0491d7a | |
Benjamin Dauvergne | 0d5db81460 | |
Benjamin Dauvergne | 0aa78ffedb | |
Benjamin Dauvergne | 687e05b28e | |
Nicolas Roche | 965a6015da | |
Nicolas Roche | d833b539b7 | |
Nicolas Roche | 910632116f | |
Nicolas Roche | e38710c4fa | |
Nicolas Roche | 526f081a29 | |
Nicolas Roche | 349e219f57 | |
Nicolas Roche | 224a2bda7b | |
Nicolas Roche | 0c0b6fe72a | |
Nicolas Roche | 50ca42ec1d | |
Nicolas Roche | da27a33dce | |
Agate | eac46f952d | |
Nicolas Roche | c84f3fa279 | |
Nicolas Roche | 1776e0fbc4 | |
Nicolas Roche | f9d278eb41 | |
Nicolas Roche | cb46e3049e | |
Nicolas Roche | de5ebe8316 | |
Paul Marillonnet | a85ebc5d28 | |
Paul Marillonnet | b542c7b956 | |
Nicolas Roche | ab55213ed7 | |
Nicolas Roche | a3e9a8c4e7 | |
Nicolas Roche | 0168bf9cf2 | |
Nicolas Roche | cf96b0436a | |
Nicolas Roche | e600299331 | |
Nicolas Roche | f8be527246 | |
Nicolas Roche | 6e5fdb7c31 | |
Thomas NOËL | 00804299d9 | |
Thomas NOËL | b29fa4998b | |
Thomas NOËL | 0d9e35cc2f | |
Nicolas Roche | 91fa126653 | |
Emmanuel Cazenave | 87c0698f55 | |
Nicolas Roche | 9233aaaede | |
Nicolas Roche | be111f5bb1 | |
Nicolas Roche | ed05841b58 | |
Nicolas Roche | dc0666eb39 | |
Nicolas Roche | 14580168e1 | |
Serghei Mihai | feb5afc99b | |
Nicolas Roche | cdc3071805 | |
Nicolas Roche | 1d86c0ac65 | |
Nicolas Roche | e012e85a4a |
|
@ -8,3 +8,5 @@ d2c0be039649febded68d9d04f745cd18b2b2e03
|
|||
989fb5271967e8e87fd57837dd6d8cfe932e7ebe
|
||||
# misc: apply djhtml (#69422)
|
||||
6da81964bd91b5656364357ec06776fed3529c8a
|
||||
# misc: apply double-quote-string-fixer (#79788)
|
||||
40142de8d2d9885f7a57f4b0f5ab1a593e13aaca
|
||||
|
|
|
@ -12,5 +12,7 @@ passerelle.egg-info/
|
|||
coverage.xml
|
||||
junit-py*.xml
|
||||
.sass-cache/
|
||||
passerelle/static/css/style.css
|
||||
passerelle/static/css/style.css.map
|
||||
passerelle/**/static/**/css/style.css
|
||||
passerelle/**/static/**/css/style.css.map
|
||||
node_modules/
|
||||
coverage/
|
||||
|
|
|
@ -1,32 +1,36 @@
|
|||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
hooks:
|
||||
- id: double-quote-string-fixer
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.1.0
|
||||
rev: v3.3.1
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: ['--keep-percent-format', '--py37-plus']
|
||||
args: ['--keep-percent-format', '--py39-plus']
|
||||
- repo: https://github.com/adamchainz/django-upgrade
|
||||
rev: 1.10.0
|
||||
rev: 1.13.0
|
||||
hooks:
|
||||
- id: django-upgrade
|
||||
args: ['--target-version', '2.2']
|
||||
args: ['--target-version', '3.2']
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 22.3.0
|
||||
rev: 23.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
args: ['--target-version', 'py37', '--skip-string-normalization', '--line-length', '110']
|
||||
args: ['--target-version', 'py39', '--skip-string-normalization', '--line-length', '110']
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.12.0
|
||||
hooks:
|
||||
- id: isort
|
||||
args: ['--profile', 'black', '--line-length', '110']
|
||||
- repo: https://github.com/rtts/djhtml
|
||||
rev: 'v1.5.2'
|
||||
rev: '3.0.6'
|
||||
hooks:
|
||||
- id: djhtml
|
||||
args: ['--tabwidth', '2']
|
||||
- repo: https://git.entrouvert.org/pre-commit-debian.git
|
||||
rev: v0.1
|
||||
rev: v0.3
|
||||
hooks:
|
||||
- id: pre-commit-debian
|
||||
|
|
|
@ -6,30 +6,57 @@ pipeline {
|
|||
disableConcurrentBuilds()
|
||||
timeout(time: 30, unit: 'MINUTES')
|
||||
}
|
||||
environment {
|
||||
max = 100000
|
||||
RAND_TEST = "${Math.abs(new Random().nextInt(max+1))}"
|
||||
}
|
||||
stages {
|
||||
stage('Unit Tests') {
|
||||
steps {
|
||||
sh 'NUMPROCESSES=6 tox -rv '
|
||||
}
|
||||
post {
|
||||
always {
|
||||
script {
|
||||
utils = new Utils()
|
||||
utils.publish_coverage('coverage.xml')
|
||||
utils.publish_coverage_native('index.html')
|
||||
utils.publish_pylint('pylint.out')
|
||||
stage('Tests (in parallel)') {
|
||||
failFast true
|
||||
parallel {
|
||||
stage('Unit Tests (pytest)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv"
|
||||
}
|
||||
post {
|
||||
always {
|
||||
script {
|
||||
utils = new Utils()
|
||||
utils.publish_coverage('coverage.xml')
|
||||
utils.publish_coverage_native('index.html')
|
||||
utils.publish_pylint('pylint.out')
|
||||
}
|
||||
mergeJunitResults()
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Unit Tests (vitest)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv -e vitest"
|
||||
}
|
||||
}
|
||||
stage('Linter (pylint)') {
|
||||
steps {
|
||||
sh "NUMPROCESSES=12 RAND_TEST=${env.RAND_TEST} tox -rv -e pylint"
|
||||
}
|
||||
mergeJunitResults()
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Packaging') {
|
||||
steps {
|
||||
script {
|
||||
if (env.JOB_NAME == 'passerelle' && env.GIT_BRANCH == 'origin/main') {
|
||||
sh 'sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye passerelle'
|
||||
env.SHORT_JOB_NAME=sh(
|
||||
returnStdout: true,
|
||||
// given JOB_NAME=gitea/project/PR-46, returns project
|
||||
// given JOB_NAME=project/main, returns project
|
||||
script: '''
|
||||
echo "${JOB_NAME}" | sed "s/gitea\\///" | awk -F/ '{print $1}'
|
||||
'''
|
||||
).trim()
|
||||
if (env.GIT_BRANCH == 'main' || env.GIT_BRANCH == 'origin/main') {
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye,bookworm ${SHORT_JOB_NAME}"
|
||||
} else if (env.GIT_BRANCH.startsWith('hotfix/')) {
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye --branch ${env.GIT_BRANCH} --hotfix passerelle"
|
||||
sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d bullseye,bookworm --branch ${env.GIT_BRANCH} --hotfix ${SHORT_JOB_NAME}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
17
README
17
README
|
@ -76,7 +76,7 @@ djhtml is used to automatically indent html files, using those parameters:
|
|||
|
||||
django-upgrade is used to automatically upgrade Django syntax, using those parameters:
|
||||
|
||||
django-upgrade --target-version 2.2
|
||||
django-upgrade --target-version 3.2
|
||||
|
||||
There is .pre-commit-config.yaml to use pre-commit to automatically run these tools
|
||||
before commits. (execute `pre-commit install` to install the git hook.)
|
||||
|
@ -126,3 +126,18 @@ django-jsonresponse (https://github.com/jjay/django-jsonresponse)
|
|||
# Files: passerelle/utils/jsonresponse.py
|
||||
# Copyright (c) 2012 Yasha Borevich <j.borevich@gmail.com>
|
||||
# Licensed under the BSD license
|
||||
|
||||
tweetnacl-js (https://github.com/dchest/tweetnacl-js)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/js/nacl.min.js
|
||||
# Copyright: https://github.com/dchest/tweetnacl-js/blob/master/AUTHORS.md
|
||||
# Licensed under the Unlicense license (public domain)
|
||||
|
||||
zxing-browser (https://github.com/zxing-js/browser/)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/js/zxing-browser.min.js
|
||||
# Copyright: (c) 2018 ZXing for JS
|
||||
# Licensed under the MIT license.
|
||||
|
||||
RemixIcon (https://github.com/Remix-Design/RemixIcon)
|
||||
# Files: passerelle/apps/qrcode/static/qrcode/img/favicon.ico
|
||||
# Copyright (c) 2020 RemixIcon.com
|
||||
# Licensed under the Apache License Version 2.0
|
||||
|
|
|
@ -13,11 +13,15 @@ Homepage: https://dev.entrouvert.org/projects/passerelle
|
|||
|
||||
Package: python3-passerelle
|
||||
Architecture: all
|
||||
Depends: pdftk,
|
||||
Depends: ghostscript,
|
||||
pdftk,
|
||||
poppler-utils,
|
||||
python3-caldav,
|
||||
python3-cmislib,
|
||||
python3-cryptography,
|
||||
python3-dateutil,
|
||||
python3-distutils,
|
||||
python3-django (>= 2:2.2),
|
||||
python3-django (>= 2:3.2),
|
||||
python3-django-model-utils,
|
||||
python3-feedparser,
|
||||
python3-gadjo,
|
||||
|
@ -41,6 +45,7 @@ Depends: pdftk,
|
|||
python3-uwsgidecorators,
|
||||
python3-vobject,
|
||||
python3-xmlschema,
|
||||
python3-xmltodict,
|
||||
python3-zeep (>= 3.2),
|
||||
${misc:Depends},
|
||||
${python3:Depends},
|
||||
|
@ -58,8 +63,9 @@ Depends: adduser,
|
|||
uwsgi,
|
||||
uwsgi-plugin-python3,
|
||||
${misc:Depends},
|
||||
Recommends: memcached, nginx
|
||||
Suggests: postgresql
|
||||
Breaks: python-passerelle (<<5.75.post9)
|
||||
Replaces: python-passerelle (<<5.75.post9)
|
||||
Recommends: memcached,
|
||||
nginx,
|
||||
Suggests: postgresql,
|
||||
Breaks: python-passerelle (<<5.75.post9),
|
||||
Replaces: python-passerelle (<<5.75.post9),
|
||||
Description: Uniform access to multiple data sources and services
|
||||
|
|
|
@ -36,6 +36,11 @@ LOGGING['loggers']['paramiko.transport'] = {
|
|||
'propagate': True,
|
||||
}
|
||||
|
||||
# silence pdfrw
|
||||
LOGGING['loggers']['pdfrw'] = {
|
||||
'propagate': False,
|
||||
}
|
||||
|
||||
exec(open('/etc/%s/settings.py' % PROJECT_NAME).read())
|
||||
|
||||
# run additional settings snippets
|
||||
|
|
|
@ -4,6 +4,7 @@ After=network.target postgresql.service
|
|||
Wants=postgresql.service
|
||||
|
||||
[Service]
|
||||
SyslogIdentifier=uwsgi/%p
|
||||
Environment=PASSERELLE_SETTINGS_FILE=/usr/lib/%p/debian_config.py
|
||||
Environment=PASSERELLE_WSGI_TIMEOUT=120
|
||||
Environment=PASSERELLE_WSGI_WORKERS=5
|
||||
|
|
|
@ -18,6 +18,7 @@ spooler-python-import = passerelle.utils.spooler
|
|||
spooler-max-tasks = 20
|
||||
|
||||
# every five minutes
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants every5min
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants availability
|
||||
unique-cron = -5 -1 -1 -1 -1 /usr/bin/passerelle-manage tenant_command cron --all-tenants jobs
|
||||
# hourly
|
||||
|
|
|
@ -2,23 +2,23 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--url", help="Url of a passerelle Caluire Axel connector instance")
|
||||
parser.addoption("--nameid", help="Publik Name ID")
|
||||
parser.addoption("--firstname", help="first name of a user")
|
||||
parser.addoption("--lastname", help="Last name of a user")
|
||||
parser.addoption("--family", help="Family ID")
|
||||
parser.addoption('--url', help='Url of a passerelle Caluire Axel connector instance')
|
||||
parser.addoption('--nameid', help='Publik Name ID')
|
||||
parser.addoption('--firstname', help='first name of a user')
|
||||
parser.addoption('--lastname', help='Last name of a user')
|
||||
parser.addoption('--family', help='Family ID')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption("--url")
|
||||
return request.config.getoption('--url')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def user(request):
|
||||
return {
|
||||
'name_id': request.config.getoption("--nameid"),
|
||||
'first_name': request.config.getoption("--firstname"),
|
||||
'last_name': request.config.getoption("--lastname"),
|
||||
'family': request.config.getoption("--family"),
|
||||
'name_id': request.config.getoption('--nameid'),
|
||||
'first_name': request.config.getoption('--firstname'),
|
||||
'last_name': request.config.getoption('--lastname'),
|
||||
'family': request.config.getoption('--family'),
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ def test_link(conn, user):
|
|||
'NOM': user['last_name'],
|
||||
'PRENOM': user['first_name'],
|
||||
}
|
||||
print("Creating link with the following payload:")
|
||||
print('Creating link with the following payload:')
|
||||
pprint.pprint(payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -21,7 +21,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("GET family info")
|
||||
print('GET family info')
|
||||
url = conn + '/family_info?NameID=%s' % name_id
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -30,7 +30,7 @@ def test_link(conn, user):
|
|||
assert data['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("GET children info")
|
||||
print('GET children info')
|
||||
url = conn + '/children_info?NameID=%s' % (name_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -40,7 +40,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
|
||||
for child in data['data']['MEMBRE']:
|
||||
print("GET child info")
|
||||
print('GET child info')
|
||||
url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDENT'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -49,7 +49,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("and GET school info")
|
||||
print('and GET school info')
|
||||
url = conn + '/child_schooling_info?NameID=%s&idpersonne=%s&schooling_date=%s' % (
|
||||
name_id,
|
||||
child['IDENT'],
|
||||
|
@ -62,7 +62,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("and GET activities info")
|
||||
print('and GET activities info')
|
||||
url = conn + '/child_activities_info?NameID=%s&idpersonne=%s&schooling_date=%s' % (
|
||||
name_id,
|
||||
child['IDENT'],
|
||||
|
@ -75,7 +75,7 @@ def test_link(conn, user):
|
|||
assert res['err'] == 0
|
||||
print('\n')
|
||||
|
||||
print("GET school list")
|
||||
print('GET school list')
|
||||
url = conn + '/school_list'
|
||||
payload = {
|
||||
'num': data['data']['RESPONSABLE1']['ADRESSE']['NORUE'],
|
||||
|
@ -92,7 +92,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
return
|
||||
|
||||
print("Deleting link")
|
||||
print('Deleting link')
|
||||
url = conn + '/unlink?NameID=%s' % name_id
|
||||
resp = requests.post(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -5,25 +5,25 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--cmis-connector-url", help="Url of a passerelle CMIS connector instance")
|
||||
parser.addoption("--cmis-endpoint", help="Url of a passerelle CMIS endpoint")
|
||||
parser.addoption("--cmis-username", help="Username for the CMIS endpoint")
|
||||
parser.addoption("--cmis-password", help="Password for the CMIS endpoint")
|
||||
parser.addoption("--preserve-tree", action="store_true", default=False, help="Preserve test directory")
|
||||
parser.addoption('--cmis-connector-url', help='Url of a passerelle CMIS connector instance')
|
||||
parser.addoption('--cmis-endpoint', help='Url of a passerelle CMIS endpoint')
|
||||
parser.addoption('--cmis-username', help='Username for the CMIS endpoint')
|
||||
parser.addoption('--cmis-password', help='Password for the CMIS endpoint')
|
||||
parser.addoption('--preserve-tree', action='store_true', default=False, help='Preserve test directory')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def cmisclient(request):
|
||||
return cmislib.CmisClient(
|
||||
request.config.getoption("--cmis-endpoint"),
|
||||
request.config.getoption("--cmis-username"),
|
||||
request.config.getoption("--cmis-password"),
|
||||
request.config.getoption('--cmis-endpoint'),
|
||||
request.config.getoption('--cmis-username'),
|
||||
request.config.getoption('--cmis-password'),
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def cmis_connector(request):
|
||||
return request.config.getoption("--cmis-connector-url")
|
||||
return request.config.getoption('--cmis-connector-url')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
|
@ -31,6 +31,6 @@ def cmis_tmpdir(cmisclient, request):
|
|||
path = 'test-%s' % random.randint(0, 10000)
|
||||
folder = cmisclient.defaultRepository.rootFolder.createFolder(path)
|
||||
yield folder.properties['cmis:path']
|
||||
preserve_tree = request.config.getoption("--preserve-tree")
|
||||
preserve_tree = request.config.getoption('--preserve-tree')
|
||||
if not preserve_tree:
|
||||
folder.deleteTree()
|
||||
|
|
|
@ -10,7 +10,7 @@ SPECIAL_CHARS = '!#$%&+-^_`;[]{}+='
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"path,file_name",
|
||||
'path,file_name',
|
||||
[
|
||||
('', 'some.file'),
|
||||
('/toto', 'some.file'),
|
||||
|
@ -31,8 +31,8 @@ def test_uploadfile(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, monkeypatch
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
"path": cmis_tmpdir + path,
|
||||
"file": {"content": file_b64_content, "filename": file_name, "content_type": "image/jpeg"},
|
||||
'path': cmis_tmpdir + path,
|
||||
'file': {'content': file_b64_content, 'filename': file_name, 'content_type': 'image/jpeg'},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -59,8 +59,8 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
"path": cmis_tmpdir + '/uploadconflict',
|
||||
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
|
||||
'path': cmis_tmpdir + '/uploadconflict',
|
||||
'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -70,11 +70,11 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
|
|||
response = requests.post(
|
||||
url,
|
||||
json={
|
||||
"path": cmis_tmpdir + '/uploadconflict',
|
||||
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
|
||||
'path': cmis_tmpdir + '/uploadconflict',
|
||||
'file': {'content': file_b64_content, 'filename': 'some.file', 'content_type': 'image/jpeg'},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
resp_data = response.json()
|
||||
assert resp_data['err'] == 1
|
||||
assert resp_data['err_desc'].startswith("update conflict")
|
||||
assert resp_data['err_desc'].startswith('update conflict')
|
||||
|
|
|
@ -2,9 +2,9 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--url", help="Url of a passerelle Planitech connector instance")
|
||||
parser.addoption('--url', help='Url of a passerelle Planitech connector instance')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption("--url")
|
||||
return request.config.getoption('--url')
|
||||
|
|
|
@ -113,7 +113,7 @@ def test_main(conn):
|
|||
|
||||
|
||||
def call_generic(conn, endpoint):
|
||||
print("%s \n" % endpoint)
|
||||
print('%s \n' % endpoint)
|
||||
url = conn + '/%s' % endpoint
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -2,25 +2,25 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--url", help="Url of a passerelle Toulouse Axel connector instance")
|
||||
parser.addoption("--nameid", help="Publik Name ID")
|
||||
parser.addoption("--firstname", help="first name of a user")
|
||||
parser.addoption("--lastname", help="Last name of a user")
|
||||
parser.addoption("--dob", help="Date of birth of a user")
|
||||
parser.addoption("--dui", help="DUI number")
|
||||
parser.addoption('--url', help='Url of a passerelle Toulouse Axel connector instance')
|
||||
parser.addoption('--nameid', help='Publik Name ID')
|
||||
parser.addoption('--firstname', help='first name of a user')
|
||||
parser.addoption('--lastname', help='Last name of a user')
|
||||
parser.addoption('--dob', help='Date of birth of a user')
|
||||
parser.addoption('--dui', help='DUI number')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption("--url")
|
||||
return request.config.getoption('--url')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def user(request):
|
||||
return {
|
||||
'name_id': request.config.getoption("--nameid"),
|
||||
'first_name': request.config.getoption("--firstname"),
|
||||
'last_name': request.config.getoption("--lastname"),
|
||||
'dob': request.config.getoption("--dob"),
|
||||
'dui': request.config.getoption("--dui"),
|
||||
'name_id': request.config.getoption('--nameid'),
|
||||
'first_name': request.config.getoption('--firstname'),
|
||||
'last_name': request.config.getoption('--lastname'),
|
||||
'dob': request.config.getoption('--dob'),
|
||||
'dui': request.config.getoption('--dui'),
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@ import requests
|
|||
|
||||
|
||||
def test_link(conn, user):
|
||||
print("Get update management dates")
|
||||
print('Get update management dates')
|
||||
url = conn + '/management_dates'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -21,7 +21,7 @@ def test_link(conn, user):
|
|||
'PRENOM': user['first_name'],
|
||||
'NAISSANCE': user['dob'],
|
||||
}
|
||||
print("Creating link with the following payload:")
|
||||
print('Creating link with the following payload:')
|
||||
pprint.pprint(payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
|
@ -30,7 +30,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print("GET family info")
|
||||
print('GET family info')
|
||||
url = conn + '/family_info?NameID=%s' % name_id
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -158,7 +158,7 @@ def test_link(conn, user):
|
|||
for key in flags:
|
||||
payload[key] = True
|
||||
|
||||
print("Update family info with the following payload:")
|
||||
print('Update family info with the following payload:')
|
||||
pprint.pprint(payload)
|
||||
url = conn + '/update_family_info?NameID=%s' % name_id
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -168,7 +168,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print("GET children info")
|
||||
print('GET children info')
|
||||
url = conn + '/children_info?NameID=%s' % (name_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -178,7 +178,7 @@ def test_link(conn, user):
|
|||
print('\n')
|
||||
|
||||
for child in data['data']['ENFANT']:
|
||||
print("GET child info")
|
||||
print('GET child info')
|
||||
url = conn + '/child_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -187,7 +187,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print("GET child contact info")
|
||||
print('GET child contact info')
|
||||
url = conn + '/child_contacts_info?NameID=%s&idpersonne=%s' % (name_id, child['IDPERSONNE'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -196,7 +196,7 @@ def test_link(conn, user):
|
|||
pprint.pprint(res)
|
||||
print('\n')
|
||||
|
||||
print("Deleting link")
|
||||
print('Deleting link')
|
||||
url = conn + '/unlink?NameID=%s' % name_id
|
||||
resp = requests.post(url)
|
||||
resp.raise_for_status()
|
||||
|
|
|
@ -15,27 +15,39 @@ from zeep.helpers import serialize_object
|
|||
FAMILY_PAYLOAD = {
|
||||
'category': 'BI',
|
||||
'situation': 'MARI',
|
||||
'nbChild': '3',
|
||||
'nbTotalChild': '4',
|
||||
'nbAES': '1',
|
||||
'rl1': {
|
||||
'civility': 'MME',
|
||||
'firstname': 'Marge',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'maidenName': 'Bouvier',
|
||||
'quality': 'MERE',
|
||||
'birth': {'dateBirth': '1950-10-01'},
|
||||
'birth': {
|
||||
'dateBirth': '1950-10-01',
|
||||
'countryCode': '404',
|
||||
},
|
||||
'adresse': {
|
||||
'idStreet': '2317',
|
||||
'num': '4',
|
||||
'street1': 'requeried having idStreet provided',
|
||||
'town': 'Springfield',
|
||||
'zipcode': '62701',
|
||||
'town': 'Toulouse',
|
||||
'zipcode': '31400',
|
||||
},
|
||||
},
|
||||
'rl2': {
|
||||
'civility': 'MR',
|
||||
'firstname': 'Homer',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'quality': 'PERE',
|
||||
'birth': {'dateBirth': '1956-05-12'},
|
||||
'birth': {
|
||||
'dateBirth': '1956-05-12',
|
||||
'place': 'Brive-la-Gaillarde',
|
||||
'communeCode': '19031',
|
||||
'cdDepartment': '19',
|
||||
'countryCode': '',
|
||||
},
|
||||
'adresse': {
|
||||
'num': '742',
|
||||
'numComp': None,
|
||||
|
@ -84,8 +96,14 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'M',
|
||||
'firstname': 'Bart',
|
||||
'lastname': 'Simpson',
|
||||
'birth': {'dateBirth': '2014-04-01'},
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {
|
||||
'dateBirth': '2014-04-01',
|
||||
'place': 'Brive-la-Gaillarde',
|
||||
'communeCode': '19031',
|
||||
'cdDepartment': '19',
|
||||
'countryCode': '',
|
||||
},
|
||||
'bPhoto': True,
|
||||
'bLeaveAlone': True,
|
||||
'dietcode': 'MENU_AV',
|
||||
|
@ -115,11 +133,11 @@ FAMILY_PAYLOAD = {
|
|||
'hospital': 'Springfield General Hospital',
|
||||
'vaccinList': [
|
||||
{
|
||||
'code': '45',
|
||||
'code': '8',
|
||||
'vaccinationDate': '2011-01-11',
|
||||
},
|
||||
{
|
||||
'code': '24',
|
||||
'code': '1',
|
||||
'vaccinationDate': '2022-02-22',
|
||||
},
|
||||
],
|
||||
|
@ -140,7 +158,7 @@ FAMILY_PAYLOAD = {
|
|||
'personInfo': {
|
||||
'civility': 'MR',
|
||||
'firstname': 'Abraham Jebediah',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'dateBirth': '1927-05-24',
|
||||
'sexe': 'M',
|
||||
'contact': {
|
||||
|
@ -157,7 +175,7 @@ FAMILY_PAYLOAD = {
|
|||
'personInfo': {
|
||||
'civility': 'MME',
|
||||
'firstname': 'Mona Penelope',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'dateBirth': '1929-03-15',
|
||||
'sexe': 'F',
|
||||
'contact': {
|
||||
|
@ -175,7 +193,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'F',
|
||||
'firstname': 'Lisa',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {'dateBirth': '2016-05-09'},
|
||||
'dietcode': 'MENU_SV',
|
||||
'paiInfoBean': {
|
||||
|
@ -185,7 +203,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'F',
|
||||
'firstname': 'Maggie',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {'dateBirth': '2018-12-17'},
|
||||
'dietcode': 'MENU_PAI',
|
||||
'paiInfoBean': {
|
||||
|
@ -195,7 +213,7 @@ FAMILY_PAYLOAD = {
|
|||
{
|
||||
'sexe': 'M',
|
||||
'firstname': 'Hugo',
|
||||
'lastname': 'Simpson',
|
||||
'lastname': 'Test_Simpson',
|
||||
'birth': {'dateBirth': '2018-04-01'},
|
||||
'dietcode': 'MENU_AV',
|
||||
'paiInfoBean': {
|
||||
|
@ -243,7 +261,10 @@ def pytest_addoption(parser):
|
|||
parser.addoption('--nameid', help='Publik Name ID', default='functest')
|
||||
parser.addoption('--dui', help='DUI number', default='')
|
||||
parser.addoption(
|
||||
'--lastname', help='override lastname to create a new "update" family', default='Simpson'
|
||||
'--lastname', help='override lastname to create a new "update" family', default='Test_Simpson'
|
||||
)
|
||||
parser.addoption(
|
||||
'--quick', action='store_true', help='do not reload referentials to speed-up tests', default=False
|
||||
)
|
||||
|
||||
|
||||
|
@ -319,6 +340,7 @@ def remove_id_on_child(conn, child):
|
|||
del child['indicators'] # order may change
|
||||
child['subscribeSchoolList'] = [] # not managed by test yet
|
||||
child['subscribeActivityList'] = [] # not managed by test yet
|
||||
del child['subscribe_natures'] # order may change
|
||||
|
||||
|
||||
def remove_id_on_rlg(conn, rlg):
|
||||
|
@ -327,7 +349,11 @@ def remove_id_on_rlg(conn, rlg):
|
|||
rlg['lastname'] = 'N/A'
|
||||
remove_extra_indicators(conn, rlg['indicatorList'], 'rl-indicator')
|
||||
rlg['indicatorList'].sort(key=lambda x: x['code'])
|
||||
rlg['quotientList'].sort(key=lambda x: (x['yearRev'], x['dateStart']))
|
||||
del rlg['indicators'] # order may change
|
||||
del rlg['quotients'] # order may change
|
||||
rlg['subscribeActivityList'] = [] # not managed by test yet
|
||||
del rlg['subscribe_natures'] # order may change
|
||||
|
||||
|
||||
def remove_id_on_family(conn, family):
|
||||
|
@ -380,7 +406,10 @@ def conn(request):
|
|||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def referentials(conn):
|
||||
def referentials(request, conn):
|
||||
quick = request.config.getoption('--quick')
|
||||
if quick:
|
||||
return
|
||||
url = urlparse.urlparse(conn)
|
||||
slug = url.path.split('/')[2]
|
||||
cmd = (
|
||||
|
@ -393,10 +422,10 @@ def referentials(conn):
|
|||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def create_data(request, conn):
|
||||
def create_data(request, conn, reference_year):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
unlink(conn, name_id)
|
||||
lastname = uuid4().hex[0:30]
|
||||
lastname = 'TEST_' + uuid4().hex[0:25]
|
||||
|
||||
# create family
|
||||
create_family_payload = copy.deepcopy(FAMILY_PAYLOAD)
|
||||
|
@ -420,6 +449,21 @@ def create_data(request, conn):
|
|||
resp.raise_for_status()
|
||||
create_result = resp.json()
|
||||
assert create_result['err'] == 0
|
||||
|
||||
# add requiered quotient for subscriptions
|
||||
data = read_family(conn, name_id)
|
||||
url = conn + '/update-quotient?NameID=%s&rl_id=%s' % (name_id, data['RL1']['num'])
|
||||
payload = {
|
||||
'yearRev': str(reference_year),
|
||||
'dateStart': '%s-09-01' % (reference_year),
|
||||
'dateEnd': '3000-08-31',
|
||||
'mtt': '5000.0',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
print('\ncreate DUI: %s' % str(create_result['data']['number']))
|
||||
data = diff_family(conn, name_id, 'test_create_family.json')
|
||||
|
||||
|
@ -430,6 +474,60 @@ def create_data(request, conn):
|
|||
'lastname': lastname,
|
||||
'rl1_num': data['RL1']['num'],
|
||||
'bart_num': data['childList'][0]['num'],
|
||||
'maggie_num': data['childList'][1]['num'],
|
||||
'hugo_num': data['childList'][2]['num'],
|
||||
'data': data,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def create_data2(request, conn, reference_year):
|
||||
name_id = request.config.getoption('--nameid')
|
||||
unlink(conn, name_id)
|
||||
lastname = 'TEST_' + uuid4().hex[0:25]
|
||||
|
||||
# create family that is not located into Toulouse
|
||||
create_family_payload = copy.deepcopy(FAMILY_PAYLOAD)
|
||||
create_family_payload['rl1']['lastname'] = lastname
|
||||
create_family_payload['rl1']['adresse'] = create_family_payload['rl2']['adresse']
|
||||
create_family_payload['rl2']['adresse'] = copy.deepcopy(FAMILY_PAYLOAD['rl1']['adresse'])
|
||||
for child in create_family_payload['childList']:
|
||||
child['lastname'] = lastname
|
||||
|
||||
url = conn + '/create-family?NameID=%s' % name_id
|
||||
resp = requests.post(url, json=create_family_payload)
|
||||
resp.raise_for_status()
|
||||
create_result = resp.json()
|
||||
assert create_result['err'] == 0
|
||||
|
||||
# add requiered quotient for subscriptions
|
||||
data = read_family(conn, name_id)
|
||||
url = conn + '/update-quotient?NameID=%s&rl_id=%s' % (name_id, data['RL1']['num'])
|
||||
payload = {
|
||||
'yearRev': str(reference_year),
|
||||
'dateStart': '2023-05-15',
|
||||
'dateEnd': '3000-12-31',
|
||||
'mtt': '5000.0',
|
||||
'cdquo': '1',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
|
||||
print('\ncreate DUI again: %s' % str(create_result['data']['number']))
|
||||
data = diff_family(conn, name_id, 'test_create_family_out_town.json')
|
||||
|
||||
return {
|
||||
'name_id': name_id, # linked
|
||||
'family_id': str(create_result['data']['number']),
|
||||
'family_payload': create_family_payload,
|
||||
'lastname': lastname,
|
||||
'rl1_num': data['RL1']['num'],
|
||||
'rl2_num': data['RL2']['num'],
|
||||
'bart_num': data['childList'][0]['num'],
|
||||
'lisa_num': data['childList'][1]['num'],
|
||||
'maggie_num': data['childList'][2]['num'],
|
||||
'hugo_num': data['childList'][3]['num'],
|
||||
'data': data,
|
||||
}
|
||||
|
||||
|
@ -501,3 +599,232 @@ def update_data(request, conn):
|
|||
'maggie_num': data['childList'][2]['num'],
|
||||
'data': data,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def reference_year():
|
||||
some_date = datetime.date.today()
|
||||
if some_date.month <= 8:
|
||||
# between january and august, reference year is the year just before
|
||||
return some_date.year - 1
|
||||
return some_date.year
|
||||
|
||||
|
||||
def get_subscription_info(nature, activity_text, unit_text, place_text, con, name_id, person_id, year):
|
||||
def select_item(resp, text):
|
||||
item = None
|
||||
for item in resp.json()['data']:
|
||||
if item['text'] == text:
|
||||
break
|
||||
else:
|
||||
raise Exception("do not find '%s'" % text)
|
||||
return item
|
||||
|
||||
# select activity
|
||||
url = con + '/get-person-activity-list'
|
||||
params = {
|
||||
'NameID': name_id,
|
||||
'person_id': person_id,
|
||||
'start_date': '%s-09-01' % year,
|
||||
'end_date': '%s-08-31' % (year + 1),
|
||||
'nature': nature,
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
activity = select_item(resp, activity_text)
|
||||
|
||||
# select unit
|
||||
url = con + '/get-person-unit-list'
|
||||
params = {
|
||||
'NameID': name_id,
|
||||
'person_id': person_id,
|
||||
'start_date': '%s-09-01' % year,
|
||||
'end_date': '%s-08-31' % (year + 1),
|
||||
'activity_id': activity['id'],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
unit = select_item(resp, unit_text)
|
||||
|
||||
# select place
|
||||
url = con + '/get-person-place-list'
|
||||
params = {
|
||||
'NameID': name_id,
|
||||
'person_id': person_id,
|
||||
'start_date': '%s-09-01' % year,
|
||||
'end_date': '%s-08-31' % (year + 1),
|
||||
'activity_id': activity['id'],
|
||||
'unit_id': unit['id'],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert len(resp.json()['data']) > 0
|
||||
place = select_item(resp, place_text)
|
||||
assert place['capacityInfo']['controlOK'] is True
|
||||
|
||||
# check subscription info
|
||||
url = con + '/get-person-subscription-info'
|
||||
params = {
|
||||
'NameID': name_id,
|
||||
'person_id': person_id,
|
||||
'activity_id': activity['id'],
|
||||
'unit_id': unit['id'],
|
||||
'place_id': place['id'],
|
||||
'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
info = resp.json()['data']
|
||||
assert info['controlResult']['controlOK'] is True
|
||||
return {
|
||||
'activity': activity,
|
||||
'unit': unit,
|
||||
'place': place,
|
||||
'info': info,
|
||||
}
|
||||
|
||||
|
||||
def get_loisirs_subscribe_info(con, data, year):
|
||||
return get_subscription_info(
|
||||
'LOISIRS',
|
||||
# Sigec made this loisirs activity available for functests
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
|
||||
'MERCREDI - 15h30/17h - 8/15Ans',
|
||||
'ARGOULETS',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_loisirs_subscribe_info3(con, data, year):
|
||||
return get_subscription_info(
|
||||
'LOISIRS',
|
||||
# Sigec made this loisirs activity available for functests
|
||||
'Vitrail Fusing 1/2 Je Adultes',
|
||||
'Inscription annuelle',
|
||||
'Centre Culturel ALBAN MINVILLE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_extrasco_subscribe_info(con, data, year):
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL ELEMENTAIRE Maourine Juin',
|
||||
'PUBLIK ADL ELEMENTAIRE Maourine JUIN 22/23(NE PAS UTILISER)',
|
||||
'MAOURINE (la) ELEMENTAIRE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
def get_extrasco_subscribe_info2(con, data, year):
|
||||
return get_subscription_info(
|
||||
'EXTRASCO',
|
||||
# Sigec made this extra-sco activity available for functests
|
||||
'ADL MATERNELLE Lardenne Juin',
|
||||
'PUBLIK ADL MATER JOURNEE AVEC REPAS',
|
||||
'LARDENNE MATERNELLE',
|
||||
con,
|
||||
data['name_id'],
|
||||
data['bart_num'],
|
||||
year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_loisirs_subscribe_info(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info2(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_loisirs_subscribe_info(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def loisirs_subscribe_info3(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_loisirs_subscribe_info3(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_extrasco_subscribe_info(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info2(conn, create_data, reference_year):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
return get_extrasco_subscribe_info2(conn, create_data, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def extrasco_subscribe_info3(conn, create_data2, reference_year):
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
return get_extrasco_subscribe_info2(conn, create_data2, reference_year)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def perisco_subscribe_info(conn, create_data, reference_year):
|
||||
'''This fixture is a configuration trick from Sigec
|
||||
as peri-sco should not be available for subscription
|
||||
and as a consequence, should not be displayed from catalogs'''
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
return get_subscription_info(
|
||||
None,
|
||||
# Sigec made this peri-sco activity available for functests
|
||||
'Temps du midi',
|
||||
'TEST TEMPS DU MIDI 22/23',
|
||||
'AMIDONNIERS ELEMENTAIRE',
|
||||
conn,
|
||||
create_data['name_id'],
|
||||
create_data['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def perisco_subscribe_adulte_info(conn, create_data2, reference_year):
|
||||
'''This fixture is a configuration trick from Sigec
|
||||
as peri-sco should not be available for subscription
|
||||
and as a consequence, should not be displayed from catalogs'''
|
||||
unlink(conn, create_data2['name_id'])
|
||||
link(conn, create_data2)
|
||||
|
||||
return get_subscription_info(
|
||||
None,
|
||||
# Sigec made this peri-sco activity available for functests
|
||||
'RESTAURATION ADULTE',
|
||||
'TEST RESTAURATION ADULTE 22/23',
|
||||
'DOLTO FRANCOISE MATERNELLE',
|
||||
conn,
|
||||
create_data2['name_id'],
|
||||
create_data2['bart_num'],
|
||||
reference_year,
|
||||
)
|
||||
|
|
|
@ -7,6 +7,14 @@
|
|||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUTO_OUT",
|
||||
"code": "AUTO_OUT",
|
||||
"text": "Autorisation de sortie - CLAE",
|
||||
"libelle": "Autorisation de sortie - CLAE",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUTRE",
|
||||
"code": "AUTRE",
|
||||
|
@ -16,6 +24,30 @@
|
|||
"isActive": true,
|
||||
"note": "rebellious"
|
||||
},
|
||||
{
|
||||
"id": "AUT_OUTADL",
|
||||
"code": "AUT_OUTADL",
|
||||
"text": "Autorisation de sortie - ADL",
|
||||
"libelle": "Autorisation de sortie - ADL",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUT_SANT",
|
||||
"code": "AUT_SANT",
|
||||
"text": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"libelle": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AUT_TRANS",
|
||||
"code": "AUT_TRANS",
|
||||
"text": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"libelle": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "AVL",
|
||||
"code": "AVL",
|
||||
|
@ -41,6 +73,14 @@
|
|||
"isActive": false,
|
||||
"note": null
|
||||
},
|
||||
{
|
||||
"id": "HPURG",
|
||||
"code": "HPURG",
|
||||
"text": "Hospitalisation / musures d'urgence",
|
||||
"libelle": "Hospitalisation / musures d'urgence",
|
||||
"typeDesc": "NONE",
|
||||
"isActive": false
|
||||
},
|
||||
{
|
||||
"id": "LENTILLE",
|
||||
"code": "LENTILLE",
|
||||
|
|
|
@ -7,7 +7,8 @@
|
|||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": null,
|
||||
"bPhoto": false,
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
"category": "BI",
|
||||
"situation": "MARI",
|
||||
"flagCom": true,
|
||||
"nbChild": null,
|
||||
"nbTotalChild": null,
|
||||
"nbAES": null,
|
||||
"nbChild": 3,
|
||||
"nbTotalChild": 4,
|
||||
"nbAES": "1",
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
|
@ -17,7 +17,9 @@
|
|||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
|
@ -25,8 +27,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
@ -40,7 +42,17 @@
|
|||
"profession": null,
|
||||
"CAFInfo": null,
|
||||
"indicatorList": [],
|
||||
"quotientList": [],
|
||||
"quotientList": [
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2022-09-01T00:00:00+02:00",
|
||||
"dateEnd": "3000-08-31T00:00:00+02:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
}
|
||||
],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "M\u00e8re"
|
||||
|
@ -55,9 +67,13 @@
|
|||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2014-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
|
@ -66,7 +82,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -88,7 +104,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -144,13 +160,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
@ -186,7 +202,8 @@
|
|||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
|
@ -219,7 +236,8 @@
|
|||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
|
|
|
@ -0,0 +1,408 @@
|
|||
{
|
||||
"number": "N/A",
|
||||
"category": "BI",
|
||||
"situation": "MARI",
|
||||
"flagCom": false,
|
||||
"nbChild": 3,
|
||||
"nbTotalChild": 4,
|
||||
"nbAES": "1",
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
"lastname": "N/A",
|
||||
"maidenName": "BOUVIER",
|
||||
"quality": "MERE",
|
||||
"civility": "MME",
|
||||
"birth": {
|
||||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
"num": 742,
|
||||
"numComp": null,
|
||||
"street1": "Evergreen Terrace",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "90701"
|
||||
},
|
||||
"contact": {
|
||||
"phone": null,
|
||||
"mobile": null,
|
||||
"mail": null,
|
||||
"isContactMail": false,
|
||||
"isContactSms": false,
|
||||
"isInvoicePdf": false
|
||||
},
|
||||
"profession": null,
|
||||
"CAFInfo": null,
|
||||
"indicatorList": [],
|
||||
"quotientList": [
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2023-05-15T00:00:00+02:00",
|
||||
"dateEnd": "3000-12-31T00:00:00+01:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
}
|
||||
],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "M\u00e8re"
|
||||
},
|
||||
"RL2": {
|
||||
"num": "N/A",
|
||||
"firstname": "HOMER",
|
||||
"lastname": "N/A",
|
||||
"maidenName": null,
|
||||
"quality": "PERE",
|
||||
"civility": "MR",
|
||||
"birth": {
|
||||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
"num": 4,
|
||||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
"phone": "0122222222",
|
||||
"mobile": "0622222222",
|
||||
"mail": "homer.simpson@example.org.com",
|
||||
"isContactMail": true,
|
||||
"isContactSms": true,
|
||||
"isInvoicePdf": true
|
||||
},
|
||||
"profession": {
|
||||
"codeCSP": "46",
|
||||
"profession": "Inspecteur de s\u00e9curit\u00e9",
|
||||
"employerName": "Burns",
|
||||
"phone": "0133333333",
|
||||
"addressPro": {
|
||||
"num": null,
|
||||
"street": null,
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
},
|
||||
"situation": null,
|
||||
"weeklyHours": null,
|
||||
"codeCSP_text": "EMPLOYES"
|
||||
},
|
||||
"CAFInfo": {
|
||||
"number": "123",
|
||||
"organ": "GENE",
|
||||
"organ_text": "CAF 31"
|
||||
},
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "AVL",
|
||||
"libelle": "Auxiliaire de Vie loisirs",
|
||||
"note": null,
|
||||
"choice": null,
|
||||
"code_text": "Auxiliaire de Vie loisirs"
|
||||
},
|
||||
{
|
||||
"code": "ETABSPEC",
|
||||
"libelle": "Etablissement sp\u00e9cialis\u00e9",
|
||||
"note": "SNPP",
|
||||
"choice": null,
|
||||
"code_text": "Etablissement sp\u00e9cialis\u00e9"
|
||||
}
|
||||
],
|
||||
"quotientList": [],
|
||||
"subscribeActivityList": [],
|
||||
"civility_text": "MONSIEUR",
|
||||
"quality_text": "P\u00e8re"
|
||||
},
|
||||
"quotientList": [],
|
||||
"childList": [
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "BART",
|
||||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2014-04-01T00:00:00+02:00",
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
"bLeaveAlone": true,
|
||||
"authorizedPersonList": [
|
||||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
"sexe": "M",
|
||||
"contact": {
|
||||
"phone": "0312345678",
|
||||
"mobile": null,
|
||||
"mail": "abe.simpson@example.org"
|
||||
},
|
||||
"civility_text": "MONSIEUR",
|
||||
"sexe_text": "Masculin"
|
||||
},
|
||||
"personQuality": {
|
||||
"code": "13",
|
||||
"libelle": "Famille",
|
||||
"code_text": "Famille"
|
||||
}
|
||||
},
|
||||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
"sexe": "F",
|
||||
"contact": {
|
||||
"phone": "0412345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "mona.simpson@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
},
|
||||
"personQuality": {
|
||||
"code": "13",
|
||||
"libelle": "Famille",
|
||||
"code_text": "Famille"
|
||||
}
|
||||
}
|
||||
],
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "AUTRE",
|
||||
"libelle": "Autre",
|
||||
"note": "rebellious",
|
||||
"choice": null,
|
||||
"code_text": "Autre"
|
||||
},
|
||||
{
|
||||
"code": "LUNETTE",
|
||||
"libelle": "Port de lunettes",
|
||||
"note": null,
|
||||
"choice": null,
|
||||
"code_text": "Port de lunettes"
|
||||
}
|
||||
],
|
||||
"medicalRecord": {
|
||||
"familyDoctor": {
|
||||
"name": "MONROE",
|
||||
"phone": "0612341234",
|
||||
"address": {
|
||||
"street1": "Alameda",
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
}
|
||||
},
|
||||
"allergy1": "butterscotch, imitation butterscotch, glow-in-the-dark monster make-up",
|
||||
"allergy2": "shrimp and cauliflower",
|
||||
"comment1": "the shrimp allergy isn't fully identified",
|
||||
"comment2": null,
|
||||
"observ1": "Ay Caramba!",
|
||||
"observ2": "Eat my shorts!",
|
||||
"isAuthHospital": false,
|
||||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
},
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_01",
|
||||
"dateDeb": "2022-09-01T00:00:00+02:00",
|
||||
"dateFin": "2023-07-01T00:00:00+02:00",
|
||||
"description": "mischievous, rebellious, misunderstood, disruptive",
|
||||
"code_text": "PAI Alimentaire Int\u00e9gral"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "Masculin",
|
||||
"dietcode_text": "Avec viande"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "LISA",
|
||||
"sexe": "F",
|
||||
"birth": {
|
||||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_SV",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_02",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Partiel"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "F\u00e9minin",
|
||||
"dietcode_text": "Sans viande"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "MAGGIE",
|
||||
"sexe": "F",
|
||||
"birth": {
|
||||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_02",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Partiel"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "F\u00e9minin",
|
||||
"dietcode_text": "Panier PAI"
|
||||
},
|
||||
{
|
||||
"num": "N/A",
|
||||
"lastname": "N/A",
|
||||
"firstname": "HUGO",
|
||||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
"bLeaveAlone": false,
|
||||
"authorizedPersonList": [],
|
||||
"indicatorList": [],
|
||||
"medicalRecord": null,
|
||||
"insurance": null,
|
||||
"paiInfoBean": {
|
||||
"code": "PAI_01",
|
||||
"dateDeb": null,
|
||||
"dateFin": null,
|
||||
"description": null,
|
||||
"code_text": "PAI Alimentaire Int\u00e9gral"
|
||||
},
|
||||
"mother": "N/A",
|
||||
"father": "N/A",
|
||||
"rl": null,
|
||||
"subscribeSchoolList": [],
|
||||
"subscribeActivityList": [],
|
||||
"sexe_text": "Masculin",
|
||||
"dietcode_text": "Avec viande"
|
||||
}
|
||||
],
|
||||
"emergencyPersonList": [
|
||||
{
|
||||
"numPerson": "N/A",
|
||||
"civility": "MME",
|
||||
"firstname": "PATTY",
|
||||
"lastname": "BOUVIER",
|
||||
"dateBirth": "1948-08-30T00:00:00+01:00",
|
||||
"sexe": "F",
|
||||
"quality": "13",
|
||||
"contact": {
|
||||
"phone": "0112345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "patty.bouvier@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "Famille",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
},
|
||||
{
|
||||
"numPerson": "N/A",
|
||||
"civility": "MME",
|
||||
"firstname": "SELMA",
|
||||
"lastname": "BOUVIER",
|
||||
"dateBirth": "1946-04-29T00:00:00+01:00",
|
||||
"sexe": "F",
|
||||
"quality": "13",
|
||||
"contact": {
|
||||
"phone": "0112345678",
|
||||
"mobile": "0612345678",
|
||||
"mail": "selma.bouvier@example.org"
|
||||
},
|
||||
"civility_text": "MADAME",
|
||||
"quality_text": "Famille",
|
||||
"sexe_text": "F\u00e9minin"
|
||||
}
|
||||
],
|
||||
"indicatorList": [],
|
||||
"childErrorList": [],
|
||||
"category_text": "BIPARENTALE",
|
||||
"situation_text": "MARIE(E)",
|
||||
"family_id": "N/A"
|
||||
}
|
|
@ -7,9 +7,13 @@
|
|||
"civility": "MR",
|
||||
"birth": {
|
||||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
|
|
@ -0,0 +1,125 @@
|
|||
[
|
||||
{
|
||||
"id": "INDI_APE_ENF",
|
||||
"text": "INDI_APE_ENF",
|
||||
"level": "INDI_APE_ENF",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO3",
|
||||
"libelle": "CF-0/1 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HBOTH",
|
||||
"libelle": "SP-handicap parent et fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HPAR",
|
||||
"libelle": "SP-handicap parents",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_MULTIACC",
|
||||
"libelle": "CF-2 enfants \u00e0 accueillir",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_SITUP",
|
||||
"libelle": "SP-situation particuli\u00e8re personne",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "INDI_APE_FAM",
|
||||
"text": "INDI_APE_FAM",
|
||||
"level": "INDI_APE_FAM",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO2",
|
||||
"libelle": "CF-1/2 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_COMPO4",
|
||||
"libelle": "CF-0/2 actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_FIRSTC",
|
||||
"libelle": "CF-premier enfant",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HAND",
|
||||
"libelle": "H-handicap ou maladie chronique",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_NAIM",
|
||||
"libelle": "CF-naissance multiple",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "INDI_APE_RES",
|
||||
"text": "INDI_APE_RES",
|
||||
"level": "INDI_APE_RES",
|
||||
"indicatorList": [
|
||||
{
|
||||
"code": "APE_COMPO1",
|
||||
"libelle": "CF-100% actif",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_FRAT",
|
||||
"libelle": "CF-Fratrie d\u00e9j\u00e0 en accueil",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_KOFRAT",
|
||||
"libelle": "CF-sans proposition pour une partie de la fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_HFRAT",
|
||||
"libelle": "SP-handicap fratrie",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_SPLOG",
|
||||
"libelle": "SP-situation particuli\u00e8re logement",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE_ALLO",
|
||||
"libelle": "SP-accompagnement enfant allophone",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"code": "APE-MINE",
|
||||
"libelle": "SP-parent mineur",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
|
@ -1,4 +1,20 @@
|
|||
[
|
||||
{
|
||||
"id": "AUT_OUTADL",
|
||||
"code": "AUT_OUTADL",
|
||||
"text": "Autorisation de sortie - ADL",
|
||||
"libelle": "Autorisation de sortie - ADL",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUTO_OUT",
|
||||
"code": "AUTO_OUT",
|
||||
"text": "Autorisation de sortie - CLAE",
|
||||
"libelle": "Autorisation de sortie - CLAE",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUTRE",
|
||||
"code": "AUTRE",
|
||||
|
@ -31,6 +47,30 @@
|
|||
"typeDesc": "NOTE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "HPURG",
|
||||
"code": "HPURG",
|
||||
"text": "Hospitalisation / musures d'urgence",
|
||||
"libelle": "Hospitalisation / musures d'urgence",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUT_SANT",
|
||||
"code": "AUT_SANT",
|
||||
"text": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"libelle": "J'autorise le responsable d'\u00e9tablissement \u00e0 prendre, en cas d'urgence des mesures rendues n\u00e9cessaires par l'\u00e9tat de sant\u00e9 de mon enfant",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "AUT_TRANS",
|
||||
"code": "AUT_TRANS",
|
||||
"text": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"libelle": "J'autorise mon enfant \u00e0 prendre les transports de la collectivit\u00e9",
|
||||
"typeDesc": "NONE",
|
||||
"choiceList": []
|
||||
},
|
||||
{
|
||||
"id": "MDPH",
|
||||
"code": "MDPH",
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
[
|
||||
{
|
||||
"id": "MORAL",
|
||||
"code": "MORAL",
|
||||
"text": "",
|
||||
"libelle": null
|
||||
},
|
||||
{
|
||||
"id": "MME",
|
||||
"code": "MME",
|
||||
|
|
|
@ -1,4 +1,11 @@
|
|||
[
|
||||
{
|
||||
"id": "87",
|
||||
"code": "87",
|
||||
"rang": "PERSON",
|
||||
"text": "Acte de d\u00e9c\u00e8s",
|
||||
"libelle": "Acte de d\u00e9c\u00e8s"
|
||||
},
|
||||
{
|
||||
"id": "43",
|
||||
"code": "43",
|
||||
|
@ -188,6 +195,13 @@
|
|||
"text": "Certificat de scolarit\u00e9",
|
||||
"libelle": "Certificat de scolarit\u00e9"
|
||||
},
|
||||
{
|
||||
"id": "93",
|
||||
"code": "93",
|
||||
"rang": "PERSON",
|
||||
"text": "Certificat de travail",
|
||||
"libelle": "Certificat de travail"
|
||||
},
|
||||
{
|
||||
"id": "74",
|
||||
"code": "74",
|
||||
|
@ -219,7 +233,7 @@
|
|||
{
|
||||
"id": "37",
|
||||
"code": "37",
|
||||
"rang": "FAMILY",
|
||||
"rang": "PERSON",
|
||||
"text": "D\u00e9claration conjointe sign\u00e9e des parents",
|
||||
"libelle": "D\u00e9claration conjointe sign\u00e9e des parents"
|
||||
},
|
||||
|
@ -261,14 +275,14 @@
|
|||
{
|
||||
"id": "64",
|
||||
"code": "64",
|
||||
"rang": "FAMILY",
|
||||
"rang": "PERSON",
|
||||
"text": "Jugement des affaires familiales",
|
||||
"libelle": "Jugement des affaires familiales"
|
||||
},
|
||||
{
|
||||
"id": "65",
|
||||
"code": "65",
|
||||
"rang": "FAMILY",
|
||||
"rang": "PERSON",
|
||||
"text": "Jugement mise sous tutelle",
|
||||
"libelle": "Jugement mise sous tutelle"
|
||||
},
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
[
|
||||
{
|
||||
"id": "05DERO-8",
|
||||
"code": "05DERO-8",
|
||||
"text": "DERO05 - SANTE",
|
||||
"libelle": "DERO05 - SANTE"
|
||||
},
|
||||
{
|
||||
"id": "05DERO-6",
|
||||
"code": "05DERO-6",
|
||||
"text": "DERO05 - SANTE : SANTE / ORGANISATION",
|
||||
"libelle": "DERO05 - SANTE : SANTE / ORGANISATION"
|
||||
},
|
||||
{
|
||||
"id": "10DERO-2",
|
||||
"code": "10DERO-2",
|
||||
"text": "DERO10 - ORGANISATION",
|
||||
"libelle": "DERO10 - ORGANISATION"
|
||||
},
|
||||
{
|
||||
"id": "11DERO-1",
|
||||
"code": "11DERO-1",
|
||||
"text": "DERO11 - AUTRE",
|
||||
"libelle": "DERO11 - AUTRE"
|
||||
}
|
||||
]
|
|
@ -0,0 +1,56 @@
|
|||
[
|
||||
{
|
||||
"id": 102,
|
||||
"code": 102,
|
||||
"text": "CANTINE / CLAE",
|
||||
"libelle": "CANTINE / CLAE"
|
||||
},
|
||||
{
|
||||
"id": 103,
|
||||
"code": 103,
|
||||
"text": "CCAS",
|
||||
"libelle": "CCAS"
|
||||
},
|
||||
{
|
||||
"id": 101,
|
||||
"code": 101,
|
||||
"text": "DASC",
|
||||
"libelle": "DASC"
|
||||
},
|
||||
{
|
||||
"id": 104,
|
||||
"code": 104,
|
||||
"text": "DSCS",
|
||||
"libelle": "DSCS"
|
||||
},
|
||||
{
|
||||
"id": 105,
|
||||
"code": 105,
|
||||
"text": "ENFANCE LOISIRS",
|
||||
"libelle": "ENFANCE LOISIRS"
|
||||
},
|
||||
{
|
||||
"id": 106,
|
||||
"code": 106,
|
||||
"text": "PARCOURS EDUCATIFS",
|
||||
"libelle": "PARCOURS EDUCATIFS"
|
||||
},
|
||||
{
|
||||
"id": 107,
|
||||
"code": 107,
|
||||
"text": "REMBOURSEMENT",
|
||||
"libelle": "REMBOURSEMENT"
|
||||
},
|
||||
{
|
||||
"id": 108,
|
||||
"code": 108,
|
||||
"text": "SENIORS",
|
||||
"libelle": "SENIORS"
|
||||
},
|
||||
{
|
||||
"id": 109,
|
||||
"code": 109,
|
||||
"text": "SPORT",
|
||||
"libelle": "SPORT"
|
||||
}
|
||||
]
|
|
@ -0,0 +1,92 @@
|
|||
[
|
||||
{
|
||||
"id": "CE1",
|
||||
"age": 7,
|
||||
"code": "CE1",
|
||||
"text": "Cours \u00e9l\u00e9mentaire 1",
|
||||
"nature": null,
|
||||
"libelle": "Cours \u00e9l\u00e9mentaire 1",
|
||||
"numOrder": "6",
|
||||
"nextLevelCode": "CE2"
|
||||
},
|
||||
{
|
||||
"id": "CE2",
|
||||
"age": 8,
|
||||
"code": "CE2",
|
||||
"text": "Cours \u00e9l\u00e9mentaire 2",
|
||||
"nature": null,
|
||||
"libelle": "Cours \u00e9l\u00e9mentaire 2",
|
||||
"numOrder": "7",
|
||||
"nextLevelCode": "CM1"
|
||||
},
|
||||
{
|
||||
"id": "CM1",
|
||||
"age": 9,
|
||||
"code": "CM1",
|
||||
"text": "Cours moyen 1",
|
||||
"nature": null,
|
||||
"libelle": "Cours moyen 1",
|
||||
"numOrder": "8",
|
||||
"nextLevelCode": "CM2"
|
||||
},
|
||||
{
|
||||
"id": "CM2",
|
||||
"age": 10,
|
||||
"code": "CM2",
|
||||
"text": "Cours moyen 2",
|
||||
"nature": null,
|
||||
"libelle": "Cours moyen 2",
|
||||
"numOrder": "9",
|
||||
"nextLevelCode": null
|
||||
},
|
||||
{
|
||||
"id": "CP",
|
||||
"age": 6,
|
||||
"code": "CP",
|
||||
"text": "Cours pr\u00e9paratoire",
|
||||
"nature": null,
|
||||
"libelle": "Cours pr\u00e9paratoire",
|
||||
"numOrder": "5",
|
||||
"nextLevelCode": "CE1"
|
||||
},
|
||||
{
|
||||
"id": "GS",
|
||||
"age": 5,
|
||||
"code": "GS",
|
||||
"text": "Section grand",
|
||||
"nature": null,
|
||||
"libelle": "Section grand",
|
||||
"numOrder": "4",
|
||||
"nextLevelCode": "CP"
|
||||
},
|
||||
{
|
||||
"id": "MS",
|
||||
"age": 4,
|
||||
"code": "MS",
|
||||
"text": "Section moyen",
|
||||
"nature": null,
|
||||
"libelle": "Section moyen",
|
||||
"numOrder": "3",
|
||||
"nextLevelCode": "GS"
|
||||
},
|
||||
{
|
||||
"id": "PS",
|
||||
"age": 3,
|
||||
"code": "PS",
|
||||
"text": "Section petit",
|
||||
"nature": null,
|
||||
"libelle": "Section petit",
|
||||
"numOrder": "2",
|
||||
"nextLevelCode": "MS"
|
||||
},
|
||||
{
|
||||
"id": "TPS",
|
||||
"age": 2,
|
||||
"code": "TPS",
|
||||
"text": "Section tout petit",
|
||||
"nature": null,
|
||||
"libelle": "Section tout petit",
|
||||
"numOrder": "1",
|
||||
"nextLevelCode": "PS"
|
||||
}
|
||||
]
|
|
@ -0,0 +1,20 @@
|
|||
[
|
||||
{
|
||||
"id": 2022,
|
||||
"text": "2022",
|
||||
"schoolYear": 2022,
|
||||
"dateEndYearSchool": "2023-07-07T00:00:00+02:00",
|
||||
"dateStartYearSchool": "2022-09-01T00:00:00+02:00",
|
||||
"dateEndSubscribeSchool": "2023-09-01T00:00:00+02:00",
|
||||
"dateStartSubscribeSchool": "2022-09-01T00:00:00+02:00"
|
||||
},
|
||||
{
|
||||
"id": 2023,
|
||||
"text": "2023",
|
||||
"schoolYear": 2023,
|
||||
"dateEndYearSchool": "2024-07-07T00:00:00+02:00",
|
||||
"dateStartYearSchool": "2023-09-04T00:00:00+02:00",
|
||||
"dateEndSubscribeSchool": "2023-09-01T00:00:00+02:00",
|
||||
"dateStartSubscribeSchool": "2022-09-01T00:00:00+02:00"
|
||||
}
|
||||
]
|
File diff suppressed because it is too large
Load Diff
|
@ -1,33 +1,9 @@
|
|||
[
|
||||
{
|
||||
"id": "105",
|
||||
"code": "105",
|
||||
"text": "AUTRE",
|
||||
"libelle": "AUTRE"
|
||||
},
|
||||
{
|
||||
"id": "30",
|
||||
"code": "30",
|
||||
"text": "B.C.G.",
|
||||
"libelle": "B.C.G."
|
||||
},
|
||||
{
|
||||
"id": "56",
|
||||
"code": "56",
|
||||
"text": "BOOSTRIX",
|
||||
"libelle": "BOOSTRIX"
|
||||
},
|
||||
{
|
||||
"id": "27",
|
||||
"code": "27",
|
||||
"text": "CHOLERA",
|
||||
"libelle": "CHOLERA"
|
||||
},
|
||||
{
|
||||
"id": "48",
|
||||
"code": "48",
|
||||
"text": "Contr\u00f4le B.C.G.",
|
||||
"libelle": "Contr\u00f4le B.C.G."
|
||||
"text": "BCG",
|
||||
"libelle": "BCG"
|
||||
},
|
||||
{
|
||||
"id": "3",
|
||||
|
@ -41,107 +17,17 @@
|
|||
"text": "DIPHTERIE",
|
||||
"libelle": "DIPHTERIE"
|
||||
},
|
||||
{
|
||||
"id": "6",
|
||||
"code": "6",
|
||||
"text": "DIPHTERIE TETANOS",
|
||||
"libelle": "DIPHTERIE TETANOS"
|
||||
},
|
||||
{
|
||||
"id": "9",
|
||||
"code": "9",
|
||||
"text": "DIPHT TETANOS COQ",
|
||||
"libelle": "DIPHT TETANOS COQ"
|
||||
},
|
||||
{
|
||||
"id": "19",
|
||||
"code": "19",
|
||||
"text": "DT BISRUDIVAX",
|
||||
"libelle": "DT BISRUDIVAX"
|
||||
},
|
||||
{
|
||||
"id": "10",
|
||||
"code": "10",
|
||||
"text": "DT COQ POLIO",
|
||||
"libelle": "DT COQ POLIO"
|
||||
},
|
||||
{
|
||||
"id": "13",
|
||||
"code": "13",
|
||||
"text": "DT COQ POLIO IPAD",
|
||||
"libelle": "DT COQ POLIO IPAD"
|
||||
},
|
||||
{
|
||||
"id": "8",
|
||||
"code": "8",
|
||||
"text": "DT POLIO",
|
||||
"libelle": "DT POLIO"
|
||||
},
|
||||
{
|
||||
"id": "45",
|
||||
"code": "45",
|
||||
"text": "DT TETANOS COQ",
|
||||
"libelle": "DT TETANOS COQ"
|
||||
},
|
||||
{
|
||||
"id": "11",
|
||||
"code": "11",
|
||||
"text": "DT TYPHOIDE",
|
||||
"libelle": "DT TYPHOIDE"
|
||||
},
|
||||
{
|
||||
"id": "129",
|
||||
"code": "129",
|
||||
"text": "ENGERIX",
|
||||
"libelle": "ENGERIX"
|
||||
},
|
||||
{
|
||||
"id": "26",
|
||||
"code": "26",
|
||||
"text": "FIEVRE JAUNE",
|
||||
"libelle": "FIEVRE JAUNE"
|
||||
},
|
||||
{
|
||||
"id": "4",
|
||||
"code": "4",
|
||||
"text": "F.TYPHOIDES",
|
||||
"libelle": "F.TYPHOIDES"
|
||||
},
|
||||
{
|
||||
"id": "144",
|
||||
"code": "144",
|
||||
"text": "GRIPPE",
|
||||
"libelle": "GRIPPE"
|
||||
},
|
||||
{
|
||||
"id": "143",
|
||||
"code": "143",
|
||||
"text": "HAEMOPHILUS HIB",
|
||||
"libelle": "HAEMOPHILUS HIB"
|
||||
},
|
||||
{
|
||||
"id": "17",
|
||||
"code": "17",
|
||||
"text": "HAVRIX",
|
||||
"libelle": "HAVRIX"
|
||||
"text": "DTPOLIO",
|
||||
"libelle": "DTPOLIO"
|
||||
},
|
||||
{
|
||||
"id": "29",
|
||||
"code": "29",
|
||||
"text": "HEPATITE B",
|
||||
"libelle": "HEPATITE B"
|
||||
},
|
||||
{
|
||||
"id": "146",
|
||||
"code": "146",
|
||||
"text": "HEXAXIM",
|
||||
"libelle": "HEXAXIM"
|
||||
},
|
||||
{
|
||||
"id": "59",
|
||||
"code": "59",
|
||||
"text": "HEXYON",
|
||||
"libelle": "HEXYON"
|
||||
"text": "HEPATITEB",
|
||||
"libelle": "HEPATITEB"
|
||||
},
|
||||
{
|
||||
"id": "16",
|
||||
|
@ -150,226 +36,28 @@
|
|||
"libelle": "HIB"
|
||||
},
|
||||
{
|
||||
"id": "24",
|
||||
"code": "24",
|
||||
"text": "IMOVAX OREILLONS",
|
||||
"libelle": "IMOVAX OREILLONS"
|
||||
"id": "152",
|
||||
"code": "152",
|
||||
"text": "IIP",
|
||||
"libelle": "IIP"
|
||||
},
|
||||
{
|
||||
"id": "121",
|
||||
"code": "121",
|
||||
"text": "INFANRIX",
|
||||
"libelle": "INFANRIX"
|
||||
"id": "151",
|
||||
"code": "151",
|
||||
"text": "MENINGOCOQUE",
|
||||
"libelle": "MENINGOCOQUE"
|
||||
},
|
||||
{
|
||||
"id": "52",
|
||||
"code": "52",
|
||||
"text": "INFANRIX HEXA",
|
||||
"libelle": "INFANRIX HEXA"
|
||||
},
|
||||
{
|
||||
"id": "32",
|
||||
"code": "32",
|
||||
"text": "INFANRIX POLIO",
|
||||
"libelle": "INFANRIX POLIO"
|
||||
},
|
||||
{
|
||||
"id": "33",
|
||||
"code": "33",
|
||||
"text": "INFANRIX POLIO HIB",
|
||||
"libelle": "INFANRIX POLIO HIB"
|
||||
},
|
||||
{
|
||||
"id": "51",
|
||||
"code": "51",
|
||||
"text": "INFANRIX QUINTA",
|
||||
"libelle": "INFANRIX QUINTA"
|
||||
},
|
||||
{
|
||||
"id": "55",
|
||||
"code": "55",
|
||||
"text": "INFANRIX TETRA",
|
||||
"libelle": "INFANRIX TETRA"
|
||||
},
|
||||
{
|
||||
"id": "147",
|
||||
"code": "147",
|
||||
"text": "INFLUVAC TETRA",
|
||||
"libelle": "INFLUVAC TETRA"
|
||||
},
|
||||
{
|
||||
"id": "137",
|
||||
"code": "137",
|
||||
"text": "INNUGRIP",
|
||||
"libelle": "INNUGRIP"
|
||||
},
|
||||
{
|
||||
"id": "18",
|
||||
"code": "18",
|
||||
"text": "LEPTOSPIROSE",
|
||||
"libelle": "LEPTOSPIROSE"
|
||||
},
|
||||
{
|
||||
"id": "22",
|
||||
"code": "22",
|
||||
"text": "MENINGITE",
|
||||
"libelle": "MENINGITE"
|
||||
},
|
||||
{
|
||||
"id": "130",
|
||||
"code": "130",
|
||||
"text": "MENINGITEC",
|
||||
"libelle": "MENINGITEC"
|
||||
},
|
||||
{
|
||||
"id": "123",
|
||||
"code": "123",
|
||||
"text": "MENINVAC",
|
||||
"libelle": "MENINVAC"
|
||||
},
|
||||
{
|
||||
"id": "120",
|
||||
"code": "120",
|
||||
"text": "MENINVACT",
|
||||
"libelle": "MENINVACT"
|
||||
},
|
||||
{
|
||||
"id": "139",
|
||||
"code": "139",
|
||||
"text": "MENJUGATE",
|
||||
"libelle": "MENJUGATE"
|
||||
},
|
||||
{
|
||||
"id": "149",
|
||||
"code": "149",
|
||||
"text": "M-M RVAX PRO",
|
||||
"libelle": "M-M RVAX PRO"
|
||||
},
|
||||
{
|
||||
"id": "133",
|
||||
"code": "133",
|
||||
"text": "MONOTEST",
|
||||
"libelle": "MONOTEST"
|
||||
},
|
||||
{
|
||||
"id": "124",
|
||||
"code": "124",
|
||||
"text": "MONOVAX",
|
||||
"libelle": "MONOVAX"
|
||||
},
|
||||
{
|
||||
"id": "132",
|
||||
"code": "132",
|
||||
"text": "NEISVAC",
|
||||
"libelle": "NEISVAC"
|
||||
},
|
||||
{
|
||||
"id": "110",
|
||||
"code": "110",
|
||||
"text": "OTITE",
|
||||
"libelle": "OTITE"
|
||||
},
|
||||
{
|
||||
"id": "134",
|
||||
"code": "134",
|
||||
"text": "PANENZA",
|
||||
"libelle": "PANENZA"
|
||||
},
|
||||
{
|
||||
"id": "31",
|
||||
"code": "31",
|
||||
"text": "PENTACOQ",
|
||||
"libelle": "PENTACOQ"
|
||||
},
|
||||
{
|
||||
"id": "53",
|
||||
"code": "53",
|
||||
"text": "PENTAVAC",
|
||||
"libelle": "PENTAVAC"
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
"code": "2",
|
||||
"text": "POLIOMYELITE",
|
||||
"libelle": "POLIOMYELITE"
|
||||
},
|
||||
{
|
||||
"id": "128",
|
||||
"code": "128",
|
||||
"text": "PREVENAR",
|
||||
"libelle": "PREVENAR"
|
||||
},
|
||||
{
|
||||
"id": "125",
|
||||
"code": "125",
|
||||
"text": "PRIORIX",
|
||||
"libelle": "PRIORIX"
|
||||
},
|
||||
{
|
||||
"id": "54",
|
||||
"code": "54",
|
||||
"text": "REPEVAX",
|
||||
"libelle": "REPEVAX"
|
||||
},
|
||||
{
|
||||
"id": "47",
|
||||
"code": "47",
|
||||
"text": "REVAXIS",
|
||||
"libelle": "REVAXIS"
|
||||
"id": "150",
|
||||
"code": "150",
|
||||
"text": "POLIO",
|
||||
"libelle": "POLIO"
|
||||
},
|
||||
{
|
||||
"id": "28",
|
||||
"code": "28",
|
||||
"text": "R O R",
|
||||
"libelle": "R O R"
|
||||
},
|
||||
{
|
||||
"id": "127",
|
||||
"code": "127",
|
||||
"text": "ROR VAX",
|
||||
"libelle": "ROR VAX"
|
||||
},
|
||||
{
|
||||
"id": "135",
|
||||
"code": "135",
|
||||
"text": "ROTARIX",
|
||||
"libelle": "ROTARIX"
|
||||
},
|
||||
{
|
||||
"id": "20",
|
||||
"code": "20",
|
||||
"text": "ROUVAX",
|
||||
"libelle": "ROUVAX"
|
||||
},
|
||||
{
|
||||
"id": "23",
|
||||
"code": "23",
|
||||
"text": "RUDI ROUVAX",
|
||||
"libelle": "RUDI ROUVAX"
|
||||
},
|
||||
{
|
||||
"id": "21",
|
||||
"code": "21",
|
||||
"text": "RUDIVAX",
|
||||
"libelle": "RUDIVAX"
|
||||
},
|
||||
{
|
||||
"id": "113",
|
||||
"code": "113",
|
||||
"text": "SCARLATINE",
|
||||
"libelle": "SCARLATINE"
|
||||
},
|
||||
{
|
||||
"id": "14",
|
||||
"code": "14",
|
||||
"text": "SERUM ANTI-TETANIQUE",
|
||||
"libelle": "SERUM ANTI-TETANIQUE"
|
||||
},
|
||||
{
|
||||
"id": "141",
|
||||
"code": "141",
|
||||
"text": "SYNAGIS",
|
||||
"libelle": "SYNAGIS"
|
||||
"text": "ROR",
|
||||
"libelle": "ROR"
|
||||
},
|
||||
{
|
||||
"id": "1",
|
||||
|
@ -377,46 +65,10 @@
|
|||
"text": "TETANOS",
|
||||
"libelle": "TETANOS"
|
||||
},
|
||||
{
|
||||
"id": "7",
|
||||
"code": "7",
|
||||
"text": "TETANOS POLIO",
|
||||
"libelle": "TETANOS POLIO"
|
||||
},
|
||||
{
|
||||
"id": "12",
|
||||
"code": "12",
|
||||
"text": "TETRA COQ",
|
||||
"libelle": "TETRA COQ"
|
||||
},
|
||||
{
|
||||
"id": "46",
|
||||
"code": "46",
|
||||
"text": "TETRAVAC ACELLULAIRE",
|
||||
"libelle": "TETRAVAC ACELLULAIRE"
|
||||
},
|
||||
{
|
||||
"id": "107",
|
||||
"code": "107",
|
||||
"text": "VARICELLE",
|
||||
"libelle": "VARICELLE"
|
||||
},
|
||||
{
|
||||
"id": "15",
|
||||
"code": "15",
|
||||
"text": "VARIOLE",
|
||||
"libelle": "VARIOLE"
|
||||
},
|
||||
{
|
||||
"id": "34",
|
||||
"code": "34",
|
||||
"text": "VAXELIS",
|
||||
"libelle": "VAXELIS"
|
||||
},
|
||||
{
|
||||
"id": "148",
|
||||
"code": "148",
|
||||
"text": "VAXIGRIP",
|
||||
"libelle": "VAXIGRIP"
|
||||
"text": "TETRACOQ",
|
||||
"libelle": "TETRACOQ"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -7,7 +7,9 @@
|
|||
"dateBirth": "1970-01-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
|
@ -16,7 +18,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -38,7 +40,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -94,13 +96,13 @@
|
|||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
{
|
||||
"familyDoctor": {
|
||||
"name": "HIBBERT",
|
||||
"phone": "0656785678",
|
||||
"address": {
|
||||
"street1": "General Hospital",
|
||||
"zipcode": "90701",
|
||||
"town": "Springfield"
|
||||
}
|
||||
},
|
||||
"allergy1": null,
|
||||
"allergy2": null,
|
||||
"comment1": null,
|
||||
"comment2": null,
|
||||
"observ1": null,
|
||||
"observ2": null,
|
||||
"isAuthHospital": true,
|
||||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
}
|
|
@ -10,13 +10,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
"category": "BI",
|
||||
"situation": "MARI",
|
||||
"flagCom": true,
|
||||
"nbChild": null,
|
||||
"nbTotalChild": null,
|
||||
"nbAES": null,
|
||||
"nbChild": 3,
|
||||
"nbTotalChild": 4,
|
||||
"nbAES": "1",
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
|
@ -17,7 +17,9 @@
|
|||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
|
@ -25,8 +27,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
@ -54,9 +56,13 @@
|
|||
"civility": "MR",
|
||||
"birth": {
|
||||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
@ -125,9 +131,13 @@
|
|||
"sexe": "M",
|
||||
"birth": {
|
||||
"dateBirth": "2014-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"place": "Brive-la-Gaillarde",
|
||||
"communeCode": "19031",
|
||||
"countryCode": null,
|
||||
"cdDepartment": "19",
|
||||
"communeCode_text": "BRIVE-LA-GAILLARDE",
|
||||
"cdDepartment_text": "CORREZE",
|
||||
"zipCode": "19100"
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": true,
|
||||
|
@ -136,7 +146,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "ABRAHAM JEBEDIAH",
|
||||
"dateBirth": "1927-05-24T00:00:00+01:00",
|
||||
"civility": "MR",
|
||||
|
@ -158,7 +168,7 @@
|
|||
{
|
||||
"personInfo": {
|
||||
"num": "N/A",
|
||||
"lastname": "SIMPSON",
|
||||
"lastname": "TEST_SIMPSON",
|
||||
"firstname": "MONA PENELOPE",
|
||||
"dateBirth": "1929-03-15T00:00:00Z",
|
||||
"civility": "MME",
|
||||
|
@ -214,13 +224,13 @@
|
|||
"hospital": "Springfield General Hospital",
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
@ -250,7 +260,8 @@
|
|||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_SV",
|
||||
"bPhoto": false,
|
||||
|
@ -283,7 +294,8 @@
|
|||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
|
@ -316,7 +328,8 @@
|
|||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
|
|
|
@ -2,10 +2,10 @@
|
|||
"number": "N/A",
|
||||
"category": "AUTR",
|
||||
"situation": "AUTR",
|
||||
"flagCom": true,
|
||||
"nbChild": null,
|
||||
"nbTotalChild": null,
|
||||
"nbAES": null,
|
||||
"flagCom": false,
|
||||
"nbChild": 0,
|
||||
"nbTotalChild": 0,
|
||||
"nbAES": "0",
|
||||
"RL1": {
|
||||
"num": "N/A",
|
||||
"firstname": "MARGE",
|
||||
|
@ -17,7 +17,8 @@
|
|||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
@ -55,7 +56,9 @@
|
|||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
@ -111,7 +114,9 @@
|
|||
"dateBirth": "1970-01-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"dietcode": null,
|
||||
"bPhoto": false,
|
||||
|
@ -148,13 +153,13 @@
|
|||
"hospital": null,
|
||||
"vaccinList": [
|
||||
{
|
||||
"code": "24",
|
||||
"libelle": "IMOVAX OREILLONS",
|
||||
"code": "1",
|
||||
"libelle": "TETANOS",
|
||||
"vaccinationDate": "2022-02-22T00:00:00+01:00"
|
||||
},
|
||||
{
|
||||
"code": "45",
|
||||
"libelle": "DT TETANOS COQ",
|
||||
"code": "8",
|
||||
"libelle": "DTPOLIO",
|
||||
"vaccinationDate": "2011-01-11T00:00:00+01:00"
|
||||
}
|
||||
]
|
||||
|
@ -183,7 +188,8 @@
|
|||
"dateBirth": "2016-05-09T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_SV",
|
||||
"bPhoto": false,
|
||||
|
@ -216,7 +222,8 @@
|
|||
"dateBirth": "2018-12-17T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_PAI",
|
||||
"bPhoto": false,
|
||||
|
@ -249,7 +256,8 @@
|
|||
"dateBirth": "2018-04-01T00:00:00+02:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"dietcode": "MENU_AV",
|
||||
"bPhoto": false,
|
||||
|
|
|
@ -1,27 +1,36 @@
|
|||
[
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
},
|
||||
{
|
||||
"yearRev": 2020,
|
||||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "1",
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-01T00:00:00+01:00",
|
||||
"dateEnd": "2022-01-01T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2021,
|
||||
"dateStart": "2022-01-02T00:00:00+01:00",
|
||||
"dateEnd": "2022-12-31T00:00:00+01:00",
|
||||
"mtt": 1500.33,
|
||||
"cdquo": "2",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus Petite enfance"
|
||||
},
|
||||
{
|
||||
"yearRev": 2022,
|
||||
"dateStart": "2022-09-01T00:00:00+02:00",
|
||||
"dateEnd": "3000-08-31T00:00:00+02:00",
|
||||
"mtt": 5000.0,
|
||||
"cdquo": "1",
|
||||
"codeUti": null,
|
||||
"cdquo_text": "Revenus fiscaux"
|
||||
|
|
|
@ -9,7 +9,8 @@
|
|||
"dateBirth": "1950-10-01T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": null,
|
||||
"cdDepartment": null
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": "2317",
|
||||
|
@ -17,8 +18,8 @@
|
|||
"numComp": null,
|
||||
"street1": "RUE ACHILLE VIADIEU",
|
||||
"street2": null,
|
||||
"town": "Springfield",
|
||||
"zipcode": "62701",
|
||||
"town": "Toulouse",
|
||||
"zipcode": "31400",
|
||||
"idStreet_text": "RUE ACHILLE VIADIEU"
|
||||
},
|
||||
"contact": {
|
||||
|
|
|
@ -9,7 +9,9 @@
|
|||
"dateBirth": "1956-05-12T00:00:00+01:00",
|
||||
"place": null,
|
||||
"communeCode": null,
|
||||
"countryCode": null
|
||||
"countryCode": "404",
|
||||
"cdDepartment": null,
|
||||
"countryCode_text": "USA"
|
||||
},
|
||||
"adresse": {
|
||||
"idStreet": null,
|
||||
|
|
|
@ -1,35 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import argparse
|
||||
|
||||
import requests
|
||||
import zeep
|
||||
from zeep.transports import Transport
|
||||
from zeep.wsse.username import UsernameToken
|
||||
|
||||
WSSE = UsernameToken('maelis-webservice', 'maelis-password')
|
||||
WSDL_URL = 'https://demo-toulouse.sigec.fr/maelisws-toulouse-recette/services/FamilyService?wsdl'
|
||||
|
||||
|
||||
def read_family(family_id, verbose):
|
||||
session = requests.Session()
|
||||
session.verify = False
|
||||
transport = Transport(session=session)
|
||||
settings = zeep.Settings(strict=False, xsd_ignore_sequence_order=True)
|
||||
client = zeep.Client(WSDL_URL, transport=transport, wsse=WSSE, settings=settings)
|
||||
|
||||
result = client.service.readFamily(
|
||||
dossierNumber=family_id,
|
||||
# schoolYear=
|
||||
# incomeYear=2022, # <-- pour filtrer les quotients sur cette année
|
||||
# referenceYear=2020,
|
||||
)
|
||||
print(result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--verbose', '-v', type=int, default=2, help='display errors')
|
||||
parser.add_argument('family_id', help='196544', nargs='?', default='196544')
|
||||
args = parser.parse_args()
|
||||
read_family(args.family_id, verbose=args.verbose)
|
|
@ -5,20 +5,27 @@ from .conftest import diff
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"ref",
|
||||
'ref',
|
||||
[
|
||||
'ape-indicators',
|
||||
'category',
|
||||
'child-indicator',
|
||||
'civility',
|
||||
'country',
|
||||
'county',
|
||||
'csp',
|
||||
'dietcode',
|
||||
'document',
|
||||
'exemption-reasons',
|
||||
#'nursery',
|
||||
'organ',
|
||||
'pai',
|
||||
'quality',
|
||||
'quotient',
|
||||
#'regie',
|
||||
'rl-indicator',
|
||||
'school-levels',
|
||||
'school-years',
|
||||
'situation',
|
||||
'street',
|
||||
'vaccin',
|
||||
|
@ -34,4 +41,5 @@ def test_referentials(conn, referentials, ref):
|
|||
for item in res['data']:
|
||||
assert 'id' in item
|
||||
assert 'text' in item
|
||||
assert diff(res['data'], 'test_read_%s_list.json' % ref)
|
||||
if ref not in ['street', 'county', 'nursery']:
|
||||
assert diff(res['data'], 'test_read_%s_list.json' % ref)
|
|
@ -34,12 +34,9 @@ def test_link(conn, update_data):
|
|||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res == {
|
||||
'err': 1,
|
||||
'err_class': 'passerelle.utils.jsonresponse.APIError',
|
||||
'err_desc': 'E02 : Le dossier numéro [999999] ne correspond à aucune famille',
|
||||
'data': None,
|
||||
}
|
||||
assert res['err'] == 1
|
||||
assert res['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
assert 'E02 : Le dossier numéro [999999] ne correspond à aucune famille' in res['err_desc']
|
||||
|
||||
# wrong DUI firstname
|
||||
payload = {
|
|
@ -9,21 +9,33 @@ from .conftest import diff, diff_child, diff_family, diff_rlg, link, read_family
|
|||
FAMILY_RESET_PAYLOAD = {
|
||||
'category': 'AUTR',
|
||||
'situation': 'AUTR',
|
||||
'nbChild': '',
|
||||
'nbTotalChild': '',
|
||||
'nbAES': '',
|
||||
'rl1': {
|
||||
'civility': 'MR', # no effect
|
||||
'firstname': 'Marge', # must be
|
||||
'lastname': 'Simpson', # must be
|
||||
'lastname': 'Test_Simpson', # must be
|
||||
'maidenName': 'reset', # no effect
|
||||
'quality': 'AU',
|
||||
'birth': {'dateBirth': '1950-10-01'}, # must be
|
||||
'birth': {
|
||||
'dateBirth': '1950-10-01', # must be
|
||||
'countryCode': '',
|
||||
},
|
||||
'adresse': {'idStreet': '', 'street1': 'reset', 'town': 'reset', 'zipcode': 'reset'},
|
||||
},
|
||||
'rl2': {
|
||||
'civility': 'MME', # no effect
|
||||
'firstname': 'Homer', # must be
|
||||
'lastname': 'Simpson', # must be
|
||||
'lastname': 'Test_Simpson', # must be
|
||||
'quality': 'AU',
|
||||
'birth': {'dateBirth': '1956-05-12'}, # must be
|
||||
'birth': {
|
||||
'dateBirth': '1956-05-12', # must be
|
||||
'place': '',
|
||||
'communeCode': '',
|
||||
'cdDepartment': '',
|
||||
'countryCode': '404',
|
||||
},
|
||||
'adresse': {
|
||||
'num': '42',
|
||||
'numComp': 'Q',
|
||||
|
@ -68,7 +80,13 @@ FAMILY_RESET_PAYLOAD = {
|
|||
'sexe': 'F',
|
||||
'firstname': 'Bartolome', # some side effects, cf test_update_child
|
||||
'lastname': 'Simps',
|
||||
'birth': {'dateBirth': '1970-01-01'},
|
||||
'birth': {
|
||||
'dateBirth': '1970-01-01',
|
||||
'place': '',
|
||||
'communeCode': '',
|
||||
'cdDepartment': '',
|
||||
'countryCode': '404',
|
||||
},
|
||||
'bPhoto': False,
|
||||
'bLeaveAlone': False,
|
||||
'dietcode': '',
|
||||
|
@ -218,13 +236,18 @@ def test_update_family(conn, update_data):
|
|||
|
||||
def test_create_family(conn, create_data, update_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
# search the 'Test_Simpson' default test family
|
||||
resp = requests.get(conn + '/search-family?q=Test_Simpson')
|
||||
resp.raise_for_status()
|
||||
assert len(resp.json()['data']) >= 1
|
||||
assert any(data['RL1']['lastname'] == 'TEST_SIMPSON' for data in resp.json()['data'])
|
||||
|
||||
url = conn + '/create-family?NameID=%s' % create_data['name_id']
|
||||
|
||||
# RL1 already exists (on update_data) error
|
||||
unlink(conn, create_data['name_id'])
|
||||
payload = copy.deepcopy(create_data['family_payload'])
|
||||
payload['rl1']['lastname'] = 'Simpson'
|
||||
payload['rl1']['lastname'] = 'Test_Simpson'
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
@ -245,7 +268,7 @@ def test_create_family(conn, create_data, update_data):
|
|||
|
||||
def test_is_rl_exists(conn, update_data):
|
||||
url = conn + '/is-rl-exists'
|
||||
payload = {'firstname': 'Marge', 'lastname': 'Simpson', 'dateBirth': '1950-10-01'}
|
||||
payload = {'firstname': 'Marge', 'lastname': 'Test_Simpson', 'dateBirth': '1950-10-01'}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'err': 0, 'data': True}
|
||||
|
@ -262,7 +285,7 @@ def test_is_rl_exists(conn, update_data):
|
|||
assert resp.json() == {'err': 0, 'data': False}
|
||||
|
||||
# test on rl2
|
||||
payload = {'firstname': 'Homer', 'lastname': 'Simpson', 'dateBirth': '1956-05-12'}
|
||||
payload = {'firstname': 'Homer', 'lastname': 'Test_Simpson', 'dateBirth': '1956-05-12'}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'err': 0, 'data': True}
|
||||
|
@ -286,7 +309,7 @@ def test_create_rl2(conn, create_data, update_data):
|
|||
assert diff_rlg(conn, create_data['name_id'], 2, 'test_create_rl2.json')
|
||||
|
||||
|
||||
@pytest.mark.parametrize("rl", ['1', '2'])
|
||||
@pytest.mark.parametrize('rl', ['1', '2'])
|
||||
def test_update_rlg(conn, update_data, rl):
|
||||
rlg = 'rl' + rl
|
||||
RLG = 'RL' + rl
|
||||
|
@ -347,7 +370,7 @@ def test_update_rlg(conn, update_data, rl):
|
|||
in res['err_desc']
|
||||
)
|
||||
else:
|
||||
assert "La date de naissance ne peut pas être modifiée" in res['err_desc']
|
||||
assert 'La date de naissance ne peut pas être modifiée' in res['err_desc']
|
||||
|
||||
# restore RL1
|
||||
payload = copy.deepcopy(update_data['family_payload'][rlg])
|
||||
|
@ -431,28 +454,18 @@ def test_create_child(conn, create_data, update_data):
|
|||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert 'Il existe déjà un enfant correspondant' in res['err_desc']
|
||||
res['err_desc'] = 'N/A'
|
||||
assert res == {
|
||||
'err': 1,
|
||||
'err_class': 'passerelle.utils.jsonresponse.APIError',
|
||||
'err_desc': 'N/A',
|
||||
'data': None,
|
||||
}
|
||||
assert res['err'] == 1
|
||||
assert res['err_class'] == 'passerelle.utils.soap.SOAPFault'
|
||||
assert 'E65 : Il existe déjà un enfant correspondant' in res['err_desc']
|
||||
|
||||
# child already exists error (Lisa form update_data)
|
||||
payload['lastname'] = 'Simpson'
|
||||
payload['lastname'] = 'Test_Simpson'
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert 'Il existe déjà un enfant correspondant' in res['err_desc']
|
||||
res['err_desc'] = 'N/A'
|
||||
assert res == {
|
||||
'err': 1, # error is return into childErrorList
|
||||
'err_class': 'passerelle.utils.jsonresponse.APIError',
|
||||
'err_desc': 'N/A',
|
||||
'data': None,
|
||||
}
|
||||
assert res['err'] == 1
|
||||
assert res['err_class'] == 'passerelle.utils.jsonresponse.APIError'
|
||||
assert 'E65a : Il existe déjà un enfant correspondant' in res['err_desc']
|
||||
|
||||
|
||||
def test_update_child(conn, update_data, create_data):
|
||||
|
@ -605,6 +618,24 @@ def test_update_child_medical_record(conn, update_data):
|
|||
update_data['bart_num'],
|
||||
)
|
||||
|
||||
# update only doctor
|
||||
# #2720: allergies comments, and observations are erased
|
||||
payload = {
|
||||
'familyDoctor': {
|
||||
'name': 'Hibbert',
|
||||
'phone': '0656785678',
|
||||
'address': {
|
||||
'street1': 'General Hospital',
|
||||
'zipcode': '90701',
|
||||
'town': 'Springfield',
|
||||
},
|
||||
},
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert diff_child(conn, update_data['name_id'], 0, 'test_update_child_doctor.json', key='medicalRecord')
|
||||
|
||||
# reset medical record
|
||||
payload = FAMILY_RESET_PAYLOAD['childList'][0]['medicalRecord']
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -768,21 +799,22 @@ def test_update_quotient(conn, create_data):
|
|||
'dateStart': '2022-01-01',
|
||||
'dateEnd': '2022-12-31',
|
||||
'mtt': '1500.33',
|
||||
'cdquo': '1',
|
||||
'cdquo': '2',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = read_family(conn, create_data['name_id'])
|
||||
assert data['RL1']['quotientList'] == [
|
||||
assert len(data['RL1']['quotientList']) == 2
|
||||
assert data['RL1']['quotients']['2'] == [
|
||||
{
|
||||
'yearRev': 2021,
|
||||
'dateStart': '2022-01-01T00:00:00+01:00',
|
||||
'dateEnd': '2022-12-31T00:00:00+01:00',
|
||||
'mtt': 1500.33,
|
||||
'cdquo': '1',
|
||||
'cdquo': '2',
|
||||
'codeUti': None,
|
||||
'cdquo_text': 'Revenus fiscaux',
|
||||
'cdquo_text': 'Revenus Petite enfance',
|
||||
}
|
||||
]
|
||||
|
||||
|
@ -792,7 +824,7 @@ def test_update_quotient(conn, create_data):
|
|||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = read_family(conn, create_data['name_id'])
|
||||
assert len(data['RL1']['quotientList']) == 2
|
||||
assert len(data['RL1']['quotients']['2']) == 2
|
||||
|
||||
# add quotient on another income year
|
||||
payload['yearRev'] = '2020'
|
||||
|
@ -800,7 +832,7 @@ def test_update_quotient(conn, create_data):
|
|||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = diff_rlg(conn, create_data['name_id'], 1, 'test_update_quotient.json', 'quotientList')
|
||||
assert len(data['RL1']['quotientList']) == 3
|
||||
assert len(data['RL1']['quotients']['2']) == 3
|
||||
|
||||
# test read-family with reference year
|
||||
url = conn + '/read-family?NameID=%s&income_year=%s' % (create_data['name_id'], '2020')
|
||||
|
@ -900,7 +932,7 @@ def test_read_family_members(conn, update_data):
|
|||
assert res['data']['personInfo']['firstname'] == 'ABRAHAM JEBEDIAH'
|
||||
|
||||
|
||||
def test_add_supplied_document(conn, create_data):
|
||||
def test_supplied_document(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
|
@ -908,6 +940,8 @@ def test_add_supplied_document(conn, create_data):
|
|||
payload = {
|
||||
'documentList/0/code': '46',
|
||||
'documentList/0/depositDate': '2022-12-20',
|
||||
'documentList/0/visaDate': '2022-12-21',
|
||||
'documentList/0/validityDate': '2022-12-22',
|
||||
'documentList/0/file': { # w.c.s. file field
|
||||
'filename': '201x201.jpg',
|
||||
'content_type': 'image/jpeg',
|
||||
|
@ -921,6 +955,7 @@ def test_add_supplied_document(conn, create_data):
|
|||
assert res['err'] == 0
|
||||
|
||||
# push on RL
|
||||
payload['documentList/0/code'] = '85'
|
||||
payload['numPerson'] = create_data['rl1_num']
|
||||
url = conn + '/add-supplied-document?NameID=%s' % create_data['name_id']
|
||||
resp = requests.post(url, json=payload)
|
||||
|
@ -928,10 +963,44 @@ def test_add_supplied_document(conn, create_data):
|
|||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# push on childe
|
||||
# push on child
|
||||
payload['documentList/0/code'] = '69'
|
||||
payload['numPerson'] = create_data['bart_num']
|
||||
url = conn + '/add-supplied-document?NameID=%s' % create_data['name_id']
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on family
|
||||
params = {
|
||||
'code': '46',
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
url = conn + '/read-supplied-document-validity?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on RL
|
||||
params = {
|
||||
'code': '85',
|
||||
'person_id': create_data['rl1_num'],
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
|
||||
# check validity on child
|
||||
params = {
|
||||
'code': '69',
|
||||
'person_id': create_data['bart_num'],
|
||||
'ref_date': '2022-12-22',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
|
@ -0,0 +1,192 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
|
||||
def test_create_nursery_demand_on_existing_child(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/get-nursery-geojson'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
nurseries = resp.json()['features']
|
||||
assert len(nurseries) >= 2
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
nb_childs = len(res['data']['childList'])
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == []
|
||||
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], create_data['maggie_num'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == []
|
||||
|
||||
url = conn + '/create-nursery-demand'
|
||||
payload = {
|
||||
'family_id': create_data['family_id'],
|
||||
'family_indicators/0/code': 'APE_FIRSTC',
|
||||
'family_indicators/0/isActive': True,
|
||||
'child_id': create_data['maggie_num'],
|
||||
'demand_indicators/0/code': 'APE_COMPO1',
|
||||
'demand_indicators/0/isActive': True,
|
||||
'start_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
'number_of_days': '2',
|
||||
'start_hour_Mon': '08:00',
|
||||
'end_hour_Mon': '',
|
||||
'comment': 'bla',
|
||||
'accept_other_nurseries': True,
|
||||
'nursery1/idActivity': nurseries[0]['properties']['activity_id'],
|
||||
'nursery1/idUnit': nurseries[0]['properties']['unit_id'],
|
||||
'nursery1/idPlace': nurseries[0]['properties']['place_id'],
|
||||
'nursery2/idActivity': nurseries[1]['properties']['activity_id'],
|
||||
'nursery2/idUnit': nurseries[1]['properties']['unit_id'],
|
||||
'nursery2/idPlace': nurseries[1]['properties']['place_id'],
|
||||
'nursery3/idActivity': '',
|
||||
'nursery3/idUnit': '',
|
||||
'nursery3/idPlace': '',
|
||||
# indicators
|
||||
'child_indicators/0/code': 'APE_HBOTH',
|
||||
'child_indicators/0/isActive': True,
|
||||
'child_indicators/1/code': 'APE_HPAR',
|
||||
'child_indicators/1/isActive': True,
|
||||
'child_indicators/2/code': 'APE_COMPO3',
|
||||
'child_indicators/2/isActive': True,
|
||||
'child_indicators/3/code': 'APE_MULTIACC',
|
||||
'child_indicators/3/isActive': True,
|
||||
'family_indicators/0/code': 'APE_COMPO4',
|
||||
'family_indicators/0/isActive': True,
|
||||
'family_indicators/1/code': 'APE_NAIM',
|
||||
'family_indicators/1/isActive': True,
|
||||
'family_indicators/2/code': 'APE_FIRSTC',
|
||||
'family_indicators/2/isActive': True,
|
||||
'family_indicators/3/code': 'APE_COMPO2',
|
||||
'family_indicators/3/isActive': True,
|
||||
'family_indicators/4/code': 'APE_HAND',
|
||||
'family_indicators/4/isActive': True,
|
||||
'demand_indicators/0/code': 'APE_FRAT',
|
||||
'demand_indicators/0/isActive': True,
|
||||
'demand_indicators/1/code': 'APE_COMPO1',
|
||||
'demand_indicators/1/isActive': True,
|
||||
'demand_indicators/2/code': 'APE_HFRAT',
|
||||
'demand_indicators/2/isActive': True,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'data': None, 'err': 0}
|
||||
|
||||
# no child added
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert len(res['data']['childList']) == nb_childs
|
||||
|
||||
# check indicators
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO2',
|
||||
'APE_COMPO4',
|
||||
'APE_FIRSTC',
|
||||
'APE_HAND',
|
||||
'APE_NAIM',
|
||||
]
|
||||
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], create_data['maggie_num'])
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO3',
|
||||
'APE_HBOTH',
|
||||
'APE_HPAR',
|
||||
'APE_MULTIACC',
|
||||
]
|
||||
|
||||
|
||||
def test_create_nursery_demand_adding_new_child(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/get-nursery-geojson'
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
nurseries = resp.json()['features']
|
||||
assert len(nurseries) >= 2
|
||||
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
nb_childs = len(res['data']['childList'])
|
||||
assert 'NELSON' not in [x['firstname'] for x in res['data']['childList']]
|
||||
|
||||
url = conn + '/create-nursery-demand'
|
||||
payload = {
|
||||
'family_id': create_data['family_id'],
|
||||
'child_first_name': 'Nelson',
|
||||
'child_last_name': 'Muntz',
|
||||
'child_birthdate': '2013-10-31',
|
||||
'child_gender': 'G',
|
||||
'start_date': datetime.date.today().strftime('%Y-%m-%d'),
|
||||
'nursery1/idActivity': nurseries[0]['properties']['activity_id'],
|
||||
'nursery1/idUnit': nurseries[0]['properties']['unit_id'],
|
||||
'nursery1/idPlace': nurseries[0]['properties']['place_id'],
|
||||
'nursery2/idActivity': nurseries[1]['properties']['activity_id'],
|
||||
'nursery2/idUnit': nurseries[1]['properties']['unit_id'],
|
||||
'nursery2/idPlace': nurseries[1]['properties']['place_id'],
|
||||
'nursery3/idActivity': '',
|
||||
'nursery3/idUnit': '',
|
||||
'nursery3/idPlace': '',
|
||||
# indicators
|
||||
'child_indicators/0/code': 'APE_HBOTH',
|
||||
'child_indicators/0/isActive': True,
|
||||
'child_indicators/1/code': 'APE_HPAR',
|
||||
'child_indicators/1/isActive': True,
|
||||
'child_indicators/2/code': 'APE_COMPO3',
|
||||
'child_indicators/2/isActive': True,
|
||||
'child_indicators/3/code': 'APE_MULTIACC',
|
||||
'child_indicators/3/isActive': True,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert res['err'] == 0
|
||||
child_id = resp.json()['data']
|
||||
assert child_id is not None
|
||||
|
||||
# a new child is created on family
|
||||
url = conn + '/read-family?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert len(res['data']['childList']) == nb_childs + 1
|
||||
assert 'NELSON' in [x['firstname'] for x in res['data']['childList']]
|
||||
assert res['data']['childList'][nb_childs]['num'] == child_id
|
||||
|
||||
# check child indicators
|
||||
url = conn + '/read-child?NameID=%s&child_id=%s' % (create_data['name_id'], child_id)
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['err'] == 0
|
||||
assert res['data']['firstname'] == 'NELSON'
|
||||
assert sorted(x['code'] for x in res['data']['indicatorList']) == [
|
||||
'APE_COMPO3',
|
||||
'APE_HBOTH',
|
||||
'APE_HPAR',
|
||||
'APE_MULTIACC',
|
||||
]
|
|
@ -0,0 +1,308 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
def school_year(conn):
    """Return the first school year label exposed by the connector."""
    resp = requests.get(conn + '/read-school-years-list')
    resp.raise_for_status()
    body = resp.json()
    assert body['err'] == 0
    # first entry of the referential is the current/reference year
    return body['data'][0]['text']
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
def exemption(conn):
    """Return the first available exemption (derogation) reason code."""
    resp = requests.get(conn + '/read-exemption-reasons-list')
    resp.raise_for_status()
    body = resp.json()
    assert body['err'] == 0
    return body['data'][0]['id']
|
||||
|
||||
|
||||
def test_displaying_school_subscribed(conn, create_data, school_year, exemption):
    """
    Read-family ramène les inscriptions aux date de visualisation paramétrées
    sur le référential YearSchool
    """
    # register on the *next* school year so its display window is still closed
    school_year = str(int(school_year) + 1)

    # create a 7 year-old child
    url = conn + '/create-child?NameID=%s' % create_data['name_id']
    payload = {
        'sexe': 'F',
        'firstname': 'Claris',
        'lastname': create_data['lastname'],
        'birth': {'dateBirth': '2016-09-12'},
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    claris_id = str(resp.json()['data']['child_id'])

    # book a school pre-registration on the (hidden) next year
    url = conn + '/create-child-school-pre-registration'
    payload = {
        'numPerson': claris_id,
        'schoolYear': school_year,
        'levelCode': 'CE1',
        'dateSubscribe': school_year + '-01-01',
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data']['returnMessage'] is None
    assert resp.json()['data']['subscribeSchoolBean']['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
    assert resp.json()['data']['subscribeSchoolBean']['adresse'] == '101 GRANDE-RUE SAINT MICHEL'

    # get Claris school from read-family
    url = conn + '/read-school-years-list'
    resp = requests.get(url)
    resp.raise_for_status()
    res = resp.json()['data']
    date_start = [x['dateStartYearSchool'] for x in res if x['text'] == school_year][0]
    # BUGFIX: was `date_start[10]`, a single character ('T' of the ISO
    # timestamp), which made this assertion vacuously true. Slice the date
    # part instead, as done everywhere else in this file with `[:10]`.
    assert date_start[:10] > datetime.datetime.now().strftime('%Y-%m-%d')

    url = conn + '/read-family?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    res = resp.json()
    assert res['err'] == 0
    schools = [x['subscribeSchoolList'] for x in res['data']['childList'] if x['num'] == claris_id][0]
    assert len(schools) == 0  # school is filtered, but it is related to an hidden school year
    # field, not dateStartYearSchool, checked before : #2425
|
||||
|
||||
|
||||
def test_school_pre_registration_by_sector(conn, create_data, school_year, exemption):
    """
    Pré-inscription de l'enfant de 7 ans dans son secteur
    """
    # create a 7 year-old child
    url = conn + '/create-child?NameID=%s' % create_data['name_id']
    payload = {
        'sexe': 'F',
        'firstname': 'Sego',
        'lastname': create_data['lastname'],
        'birth': {'dateBirth': '2016-05-09'},
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    sego_id = str(resp.json()['data']['child_id'])

    # assert there is a school at this address
    url = conn + '/read-schools-for-address-and-level'
    params = {
        'id_street': '2317',
        'num': '4',
        'year': school_year,
        'level': 'CE1',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert len(resp.json()['data']) == 1
    assert resp.json()['data'][0]['text'] == 'DUPONT PIERRE ELEMENTAIRE'

    # assert there is a school at child address
    url = conn + '/read-schools-for-child-and-level'
    params = {
        'child_id': sego_id,
        'year': school_year,
        'level': 'CE1',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert len(resp.json()['data']) == 1
    assert resp.json()['data'][0]['text'] == 'DUPONT PIERRE ELEMENTAIRE'
    school_id = resp.json()['data'][0]['idSchool']
    assert school_id == '2435'

    # book
    url = conn + '/create-child-school-pre-registration'
    payload = {
        'numPerson': sego_id,
        'schoolYear': school_year,
        'levelCode': 'CE1',
        'dateSubscribe': school_year + '-01-01',
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data']['returnMessage'] is None
    assert resp.json()['data']['subscribeSchoolBean']['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'
    assert resp.json()['data']['subscribeSchoolBean']['adresse'] == '101 GRANDE-RUE SAINT MICHEL'

    # get Sego school from read-family
    url = conn + '/read-school-years-list'
    resp = requests.get(url)
    resp.raise_for_status()
    res = resp.json()['data']
    date_start = [x['dateStartYearSchool'] for x in res if x['text'] == school_year][0]
    # BUGFIX: was `date_start[10]` (a single 'T' character from the ISO
    # timestamp, always > any date string). Compare the date prefix instead.
    assert date_start[:10] > datetime.datetime.now().strftime('%Y-%m-%d')
    # school is filtered, but it is related to an hidden school year
    # field, not dateStartYearSchool, see #2425

    url = conn + '/read-family?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    res = resp.json()
    assert res['err'] == 0
    schools = [x['subscribeSchoolList'] for x in res['data']['childList'] if x['num'] == sego_id][0]
    assert len(schools) == 1
    assert schools[0]['schoolName'] == 'DUPONT PIERRE ELEMENTAIRE'

    """
    Pré-inscription d'un enfant de 5 ans en CP avec rappprochement de fratrie pour celui de 7 ans :
    rapprochement dans le secteur de l'enfant.
    """
    # get Sego school
    url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
    params = {
        'child_id': sego_id,
        'year': school_year,
        'level': 'CE1',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    schools = data['childSubscribeSchoolInformation']['subscribeSchoolYearList']
    assert len(schools) == 1
    assert schools[0]['subscribeSchool']['school']['idSchool'] == school_id
    assert schools[0]['subscribeSchool']['perim']['idPerim'] == '2707'

    url = conn + '/create-child-school-pre-registration-with-sibling'
    payload = {
        'numPerson': create_data['maggie_num'],
        'schoolYear': school_year,
        'levelCode': 'GS',
        'datePresubscribe': school_year + '-01-01',
        'idSchoolRequested': school_id,
        'numPersonSibling': sego_id,
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert 'returnMessage' not in resp.json()
    assert resp.json()['data']['schoolName'] == 'CALAS MATERNELLE'
    assert resp.json()['data']['adresse'] == '47 RUE ACHILLE VIADIEU'  # same sector
|
||||
|
||||
|
||||
def test_school_pre_registration_by_exemption(conn, create_data, school_year, exemption):
    """
    Pré-inscription de l'enfant de 9 ans en dérogation :
    c'est une dérogation avec sélection du motif sur un établissement hors secteur
    """
    # fetch the list of schools open to exemption for Bart's level
    url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
    params = {
        'child_id': create_data['bart_num'],
        'year': school_year,
        'level': 'CM1',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    schools = data['childSubscribeSchoolInformation']['subscribeSchoolInformation']['derogSchoolList']
    assert len(schools) > 1
    # pick the first out-of-sector school as exemption target
    school_id = schools[0]['id']

    # book with an exemption reason
    url = conn + '/create-child-school-pre-registration-with-exemption'
    payload = {
        'numPerson': create_data['bart_num'],
        'schoolYear': school_year,
        'levelCode': 'CM1',
        'datePresubscribe': school_year + '-01-01',
        'idRequestSchool1': school_id,
        'derogReasonCode': exemption,
        'derogComment': 'bla',
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert 'returnMessage' not in resp.json()
    assert resp.json()['data']['schoolName'] == 'AMIDONNIERS ELEMENTAIRE'
    assert resp.json()['data']['adresse'] == '123 ALL DE BRIENNE'

    """
    Pré-inscription de l'autre enfant de 5 ans en CP
    avec rapprochement de fratrie pour celui de 9 ans :
    rapprochement hors du secteur de l'enfant.
    """

    # check E124 error
    # get a school that do not provide a level in its sector
    url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
    params = {
        'child_id': create_data['hugo_num'],
        'year': school_year,
        'level': 'GS',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    derog_list = data['childSubscribeSchoolInformation']['subscribeSchoolInformation']['derogSchoolList']
    assert [
        x['idSchool'] for x in derog_list if x['text'] == 'DIEUZAIDE JEAN MATERNELLE'
    ] == ['2437']

    # try to book on a sector that do not provide the requested level
    url = conn + '/create-child-school-pre-registration-with-sibling'
    payload = {
        'numPerson': create_data['hugo_num'],
        'schoolYear': school_year,
        'levelCode': 'CP',
        'datePresubscribe': school_year + '-01-01',
        'idSchoolRequested': '2437',
        'numPersonSibling': create_data['bart_num'],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 1
    assert resp.json()['err_class'] == 'passerelle.utils.soap.SOAPFault'
    assert 'E124' in resp.json()['err_desc']

    # get Bart school
    url = conn + '/read-child-school-informations?NameID=%s' % create_data['name_id']
    params = {
        'child_id': create_data['bart_num'],
        'year': school_year,
        'level': 'CM1',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    schools = data['childSubscribeSchoolInformation']['subscribeSchoolYearList']
    assert len(schools) == 1
    # Bart should now be registered on the exemption school picked above
    assert schools[0]['subscribeSchool']['school']['idSchool'] == school_id
    assert schools[0]['subscribeSchool']['perim']['idPerim'] == '2663'

    # book sibling on the same (out-of-sector) school
    url = conn + '/create-child-school-pre-registration-with-sibling'
    payload = {
        'numPerson': create_data['hugo_num'],
        'schoolYear': school_year,
        'levelCode': 'GS',
        'datePresubscribe': school_year + '-01-01',
        'idSchoolRequested': school_id,
        'numPersonSibling': create_data['bart_num'],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert 'returnMessage' not in resp.json()
    assert resp.json()['data']['schoolName'] == 'AMIDONNIERS MATERNELLE'
    assert resp.json()['data']['adresse'] == '125 ALL DE BRIENNE'
|
|
@ -0,0 +1,369 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import link, unlink
|
||||
|
||||
|
||||
def test_perisco(perisco_subscribe_info):
    """Check the label of the subscribed periscolaire activity."""
    activity = perisco_subscribe_info['info']['activity']
    assert activity['libelle1'] == 'TEST TEMPS DU MIDI 22/23'
|
||||
|
||||
|
||||
def test_perisco_adulte(perisco_subscribe_adulte_info):
    """Check the label of the subscribed adult periscolaire activity."""
    activity = perisco_subscribe_adulte_info['info']['activity']
    assert activity['libelle1'] == 'TEST RESTAURATION ADULTE 22/23'
|
||||
|
||||
|
||||
def test_perisco_agenda(conn, create_data, perisco_subscribe_info):
    """Subscribe Bart to a periscolaire activity, book one slot, check prefill."""
    unlink(conn, create_data['name_id'])
    link(conn, create_data)

    # subscription
    url = f"{conn}/add-person-subscription?NameID={create_data['name_id']}"
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': perisco_subscribe_info['activity']['id'],
        'unit_id': perisco_subscribe_info['unit']['id'],
        'place_id': perisco_subscribe_info['place']['id'],
        'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0

    # find first available booking
    url = f"{conn}/read-child-agenda?NameID={create_data['name_id']}"
    params = {
        'child_id': create_data['bart_num'],
        'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    agenda = resp.json()['data']
    assert len(agenda) > 0
    booking = next((slot for slot in agenda if slot['disabled'] is False), None)
    if booking is None:
        raise Exception('no booking available')
    assert booking['details']['activity_id'] == perisco_subscribe_info['activity']['id']
    assert booking['details']['activity_label'] == 'Temps du midi'
    assert booking['prefill'] is False

    # book activity
    url = f"{conn}/update-child-agenda?NameID={create_data['name_id']}"
    payload = {
        'child_id': create_data['bart_num'],
        'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [booking['id']],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json() == {
        'updated': True,
        'count': 1,
        'changes': [
            {
                'booked': True,
                'activity_id': booking['details']['activity_id'],
                'activity_label': 'Temps du midi',
                'day': booking['details']['day_str'],
            }
        ],
        'err': 0,
    }

    # check booking: the slot we just booked must now be prefilled
    url = f"{conn}/read-child-agenda?NameID={create_data['name_id']}"
    params = {
        'child_id': create_data['bart_num'],
        'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_agenda_adulte(conn, create_data2, perisco_subscribe_adulte_info):
    """Subscribe an adult (RL1) to the adult catering activity and book one slot."""
    unlink(conn, create_data2['name_id'])
    link(conn, create_data2)

    # subscription
    url = f"{conn}/add-person-subscription?NameID={create_data2['name_id']}"
    payload = {
        'person_id': create_data2['rl1_num'],
        'activity_id': perisco_subscribe_adulte_info['activity']['id'],
        'unit_id': perisco_subscribe_adulte_info['unit']['id'],
        'place_id': perisco_subscribe_adulte_info['place']['id'],
        'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0

    # find first available booking (the agenda endpoint also serves adults)
    url = f"{conn}/read-child-agenda?NameID={create_data2['name_id']}"
    params = {
        'child_id': create_data2['rl1_num'],
        'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    agenda = resp.json()['data']
    assert len(agenda) > 0
    booking = next((slot for slot in agenda if slot['disabled'] is False), None)
    if booking is None:
        raise Exception('no booking available')
    assert booking['details']['activity_id'] == perisco_subscribe_adulte_info['activity']['id']
    assert booking['details']['activity_label'] == 'RESTAURATION ADULTE'
    assert booking['prefill'] is False

    # book activity
    url = f"{conn}/update-child-agenda?NameID={create_data2['name_id']}"
    payload = {
        'child_id': create_data2['rl1_num'],
        'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
        'booking_list': [booking['id']],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json() == {
        'updated': True,
        'count': 1,
        'changes': [
            {
                'booked': True,
                'activity_id': booking['details']['activity_id'],
                'activity_label': 'RESTAURATION ADULTE',
                'day': booking['details']['day_str'],
            }
        ],
        'err': 0,
    }

    # check booking: the slot we just booked must now be prefilled
    url = f"{conn}/read-child-agenda?NameID={create_data2['name_id']}"
    params = {
        'child_id': create_data2['rl1_num'],
        'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert [x['prefill'] for x in resp.json()['data'] if x['id'] == booking['id']][0] is True
|
||||
|
||||
|
||||
def test_perisco_recurrent_week(conn, create_data, perisco_subscribe_info, reference_year):
    """Subscribe Maggie, then set a recurrent-week template and check bookings appear."""
    unlink(conn, create_data['name_id'])
    link(conn, create_data)

    # no subscribed activity yet
    url = f"{conn}/read-subscribe-activity-list?NameID={create_data['name_id']}"
    params = {
        'person_id': create_data['maggie_num'],
        'nature': 'PERISCO',
        'school_year': f'{reference_year}-{reference_year + 1}',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert len(resp.json()['data']) == 0

    # subscription
    url = f"{conn}/add-person-subscription?NameID={create_data['name_id']}"
    payload = {
        'person_id': create_data['maggie_num'],
        'activity_id': perisco_subscribe_info['activity']['id'],
        'unit_id': perisco_subscribe_info['unit']['id'],
        'place_id': perisco_subscribe_info['place']['id'],
        'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0

    # activity now listed on the subscribed-activity list
    url = f"{conn}/read-subscribe-activity-list?NameID={create_data['name_id']}"
    params = {
        'person_id': create_data['maggie_num'],
        'nature': 'PERISCO',
        'school_year': f'{reference_year}-{reference_year + 1}',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert len(resp.json()['data']) == 1
    assert resp.json()['data'][0]['id'] == perisco_subscribe_info['activity']['id']
    assert [(x['text'], x['libelle'], x['libelle2']) for x in resp.json()['data']] == [
        ('Temps du midi', 'TEST TEMPS DU MIDI 22/23', 'Temps du midi'),
    ]

    # get the recurrent-week template (open days for this activity)
    url = f"{conn}/get-recurrent-week?NameID={create_data['name_id']}"
    params = {
        'person_id': create_data['maggie_num'],
        'activity_id': perisco_subscribe_info['activity']['id'],
        'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert [(x['id'], x['day']) for x in resp.json()['data']] == [
        ('1-X', 'Lundi'),
        ('2-X', 'Mardi'),
        ('4-X', 'Jeudi'),
        ('5-X', 'Vendredi'),
    ]

    # no booking prefilled yet
    url = f"{conn}/read-child-agenda?NameID={create_data['name_id']}"
    params = {
        'child_id': create_data['maggie_num'],
        'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert not any(x['prefill'] for x in resp.json()['data'])

    # set the recurrent-week template (Monday and Tuesday)
    url = f"{conn}/update-recurrent-week?NameID={create_data['name_id']}"
    payload = {
        'person_id': create_data['maggie_num'],
        'activity_id': perisco_subscribe_info['activity']['id'],
        'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
        'recurrent_week': ['1-X', '2-X'],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'

    # there is now some bookings generated from the template
    url = f"{conn}/read-child-agenda?NameID={create_data['name_id']}"
    params = {
        'child_id': create_data['maggie_num'],
        'start_date': perisco_subscribe_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert any(x['prefill'] for x in resp.json()['data'])
|
||||
|
||||
|
||||
def test_perisco_recurrent_week_adulte(conn, create_data2, perisco_subscribe_adulte_info, reference_year):
    """Subscribe an adult (RL2), set a recurrent-week template, check bookings appear."""
    unlink(conn, create_data2['name_id'])
    link(conn, create_data2)

    # no subscribed activity yet
    url = f"{conn}/read-subscribe-activity-list?NameID={create_data2['name_id']}"
    params = {
        'person_id': create_data2['rl2_num'],
        'nature': 'PERISCO',
        'school_year': f'{reference_year}-{reference_year + 1}',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert len(resp.json()['data']) == 0

    # subscription
    url = f"{conn}/add-person-subscription?NameID={create_data2['name_id']}"
    payload = {
        'person_id': create_data2['rl2_num'],
        'activity_id': perisco_subscribe_adulte_info['activity']['id'],
        'unit_id': perisco_subscribe_adulte_info['unit']['id'],
        'place_id': perisco_subscribe_adulte_info['place']['id'],
        'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0

    # activity now listed on the subscribed-activity list
    url = f"{conn}/read-subscribe-activity-list?NameID={create_data2['name_id']}"
    params = {
        'person_id': create_data2['rl2_num'],
        'nature': 'PERISCO',
        'school_year': f'{reference_year}-{reference_year + 1}',
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert len(resp.json()['data']) == 1
    assert resp.json()['data'][0]['id'] == perisco_subscribe_adulte_info['activity']['id']
    assert [(x['text'], x['libelle'], x['libelle2']) for x in resp.json()['data']] == [
        ('RESTAURATION ADULTE', 'TEST RESTAURATION ADULTE 22/23', 'RESTAURATION ADULTE')
    ]

    # get the recurrent-week template (adult activity opens all five weekdays)
    url = f"{conn}/get-recurrent-week?NameID={create_data2['name_id']}"
    params = {
        'person_id': create_data2['rl2_num'],
        'activity_id': perisco_subscribe_adulte_info['activity']['id'],
        'ref_date': datetime.date.today().strftime('%Y-%m-%d'),
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert [(x['id'], x['day']) for x in resp.json()['data']] == [
        ('1-X', 'Lundi'),
        ('2-X', 'Mardi'),
        ('3-X', 'Mercredi'),
        ('4-X', 'Jeudi'),
        ('5-X', 'Vendredi'),
    ]

    # no booking prefilled yet
    url = f"{conn}/read-child-agenda?NameID={create_data2['name_id']}"
    params = {
        'child_id': create_data2['rl2_num'],
        'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert not any(x['prefill'] for x in resp.json()['data'])

    # set the recurrent-week template (Monday and Tuesday)
    url = f"{conn}/update-recurrent-week?NameID={create_data2['name_id']}"
    payload = {
        'person_id': create_data2['rl2_num'],
        'activity_id': perisco_subscribe_adulte_info['activity']['id'],
        'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
        'recurrent_week': ['1-X', '2-X'],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'

    # there is now some bookings generated from the template
    url = f"{conn}/read-child-agenda?NameID={create_data2['name_id']}"
    params = {
        'child_id': create_data2['rl2_num'],
        'start_date': perisco_subscribe_adulte_info['unit']['dateStart'][:10],
        'end_date': perisco_subscribe_adulte_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert any(x['prefill'] for x in resp.json()['data'])
|
|
@ -0,0 +1,261 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import get_subscription_info, link, unlink
|
||||
|
||||
# LOISIR is like EXTRACO (most tests are redondants) but :
|
||||
# * there is no calendar (days) to provide.
|
||||
# * there is a general catalog to display
|
||||
|
||||
|
||||
def test_catalog_general_loisirs(conn, update_data):
    """Check the general LOISIRS catalog: labels, criterias and note blocks."""
    unlink(conn, update_data['name_id'])
    link(conn, update_data)
    url = conn + '/read-activity-list'
    params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}

    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    catalog = resp.json()['data']
    labels = [entry['text'] for entry in catalog]
    expected_labels = [
        'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS',
        'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 14h/16h30 - 10/15Ans, LA RAMEE',
        'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 15h30/17h - 8/15Ans, ARGOULETS',
        'Promenade forêt enchantée, TEST promenade forêt enchantée, TERRITOIRE OUEST',
        'Vitrail Fusing 1/2 Je Adultes, Inscription annuelle, Centre Culturel ALBAN MINVILLE',
    ]
    for label in expected_labels:
        assert label in labels

    for item in catalog:
        if (
            item['text']
            == 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES, MERCREDI - 13h45/17h - 8/15Ans, ARGOULETS'
        ):
            assert item['criterias'] == {
                'service': {'text': 'Service', 'data': {'sports': 'Sports'}, 'order': ['sports']},
                'nature': {
                    'text': "Nature de l'activité",
                    'data': {'1': 'Activités Régulières'},
                    'order': ['1'],
                },
                'type': {
                    'text': "Type de l'activité",
                    'data': {'activites-aquatiques': 'Activités Aquatiques'},
                    'order': ['activites-aquatiques'],
                },
                'public': {
                    'text': 'Public',
                    'data': {'1': 'Enfant (3-11 ans)', '2': 'Ado (12-17 ans)'},
                    'order': ['1', '2'],
                },
                'day': {'text': 'Jours', 'data': {'3': 'Mercredi'}, 'order': ['3']},
                'place': {'text': 'Lieu', 'data': {'A10053179757': 'ARGOULETS'}, 'order': ['A10053179757']},
            }
            assert item['activity']['activityPortail']['blocNoteList'] == [
                {
                    'note': "Activité ayant lieu le Mercredi, merci de choisir votre tranche horraire en fonction de l'âge de votre enfant.",
                    'numIndex': 1,
                }
            ]
        if item['text'] == 'Promenade forêt enchantée, TEST promenade forêt enchantée, TERRITOIRE OUEST':
            assert item['criterias'] == {
                'service': {'text': 'Service', 'data': {'sports': 'Sports'}, 'order': ['sports']},
                'nature': {
                    'text': "Nature de l'activité",
                    'data': {'1': 'Activités Régulières'},
                    'order': ['1'],
                },
                'type': {
                    'text': "Type de l'activité",
                    'data': {'activite-pedestre': 'Activité Pédestre'},
                    'order': ['activite-pedestre'],
                },
                'public': {'text': 'Public', 'data': {'5': 'Sénior (60 ans et plus)'}, 'order': ['5']},
                'day': {
                    'text': 'Jours',
                    'data': {'1': 'Lundi', '2': 'Mardi', '3': 'Mercredi', '4': 'Jeudi', '5': 'Vendredi'},
                    'order': ['1', '2', '3', '4', '5'],
                },
                'place': {
                    'text': 'Lieu',
                    'data': {'A10056517597': 'TERRITOIRE OUEST'},
                    'order': ['A10056517597'],
                },
            }
            assert item['activity']['activityPortail']['blocNoteList'] == [
                {'note': 'Activité de promenade en forêt.', 'numIndex': 1}
            ]
|
||||
|
||||
|
||||
def test_catalog_personnalise_loisirs(loisirs_subscribe_info):
    """Check activity label, calendar mode and billing data of the personalized catalog."""
    info = loisirs_subscribe_info['info']
    assert info['activity']['libelle1'] == 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES'
    assert info['calendarGeneration']['code'] == 'REQUIRED'
    # no recurrent-week template on this activity
    assert [(x['id'], x['day']) for x in info['recurrent_week']] == []
    assert info['billingInformation'] == {
        'modeFact': 'FORFAIT',
        'quantity': 1.0,
        'unitPrice': 88.5,
    }
|
||||
|
||||
|
||||
def test_catalog_personnalise_loisirs_not_allowed(conn, create_data, reference_year):
    """An adult (RL1) must not be able to subscribe to a child activity.

    Previously written as try/except/return + ``assert False`` — replaced with
    the idiomatic ``pytest.raises``, which fails with a clear message when no
    exception is raised.
    """
    unlink(conn, create_data['name_id'])
    link(conn, create_data)
    with pytest.raises(Exception):
        get_subscription_info(
            'LOISIRS',
            'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
            'MERCREDI - 15h30/17h - 8/15Ans',
            'ARGOULETS',
            conn,
            create_data['name_id'],
            create_data['rl1_num'],
            reference_year,
        )
|
||||
|
||||
|
||||
def test_direct_subscribe(conn, create_data, loisirs_subscribe_info, reference_year):
    """Subscribe a child directly (no basket) to the LOISIRS activity."""
    info = loisirs_subscribe_info
    assert info['info']['controlResult']['controlOK'] is True
    unlink(conn, create_data['name_id'])
    link(conn, create_data)

    resp = requests.post(
        conn + '/add-person-subscription?NameID=%s' % create_data['name_id'],
        json={
            'person_id': create_data['hugo_num'],
            'activity_id': info['activity']['id'],
            'unit_id': info['unit']['id'],
            'place_id': info['place']['id'],
            'start_date': info['unit']['dateStart'][:10],
            'end_date': info['unit']['dateEnd'][:10],
        },
    )
    resp.raise_for_status()
    body = resp.json()
    assert body['err'] == 0

    # no idIns provided to remove subscription later
    assert body['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_direct_subscribe_out_town(conn, create_data2, loisirs_subscribe_info2, reference_year):
    """Subscribe a child of an out-of-town family directly (no basket)."""
    info = loisirs_subscribe_info2
    assert info['info']['controlResult']['controlOK'] is True
    unlink(conn, create_data2['name_id'])
    link(conn, create_data2)

    resp = requests.post(
        conn + '/add-person-subscription?NameID=%s' % create_data2['name_id'],
        json={
            'person_id': create_data2['hugo_num'],
            'activity_id': info['activity']['id'],
            'unit_id': info['unit']['id'],
            'place_id': info['place']['id'],
            'start_date': info['unit']['dateStart'][:10],
            'end_date': info['unit']['dateEnd'][:10],
        },
    )
    resp.raise_for_status()
    body = resp.json()
    assert body['err'] == 0

    # no idIns provided to remove subscription later
    assert body['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_subscribe_to_basket(conn, create_data, loisirs_subscribe_info, reference_year):
    """Subscribe through the basket, then delete the basket to clean up."""
    assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
    unlink(conn, create_data['name_id'])
    link(conn, create_data)

    subscribe_url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
    resp = requests.post(
        subscribe_url,
        json={
            'person_id': create_data['bart_num'],
            'activity_id': loisirs_subscribe_info['activity']['id'],
            'unit_id': loisirs_subscribe_info['unit']['id'],
            'place_id': loisirs_subscribe_info['place']['id'],
            'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
            'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
        },
    )
    resp.raise_for_status()
    body = resp.json()
    assert body['err'] == 0
    basket_id = body['data']['basket']['id']

    # remove subscription
    delete_url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    resp = requests.post(delete_url, json={'basket_id': basket_id})
    resp.raise_for_status()
    assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
def test_global_capacity(conn, create_data2, loisirs_subscribe_info3, reference_year):
    """Subscriptions beyond the activity's global capacity must be refused.

    Three children subscribe successfully; the fourth subscription (Hugo) is
    expected to fail with err == 1. The final catalog check is still a stub
    (a leftover ``pdb.set_trace()`` was removed here).
    """
    assert loisirs_subscribe_info3['info']['controlResult']['controlOK'] is True
    unlink(conn, create_data2['name_id'])
    link(conn, create_data2)

    url = conn + '/add-person-subscription?NameID=%s' % create_data2['name_id']
    payload = {
        'person_id': create_data2['bart_num'],
        'activity_id': loisirs_subscribe_info3['activity']['id'],
        'unit_id': loisirs_subscribe_info3['unit']['id'],
        'place_id': loisirs_subscribe_info3['place']['id'],
        'start_date': loisirs_subscribe_info3['unit']['dateStart'][:10],
        'end_date': loisirs_subscribe_info3['unit']['dateEnd'][:10],
    }

    # subscribe Bart, Lisa and Maggie
    for child_key in ('bart_num', 'lisa_num', 'maggie_num'):
        payload['person_id'] = create_data2[child_key]
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        assert resp.json()['err'] == 0

    # can't subscribe Hugo
    payload['person_id'] = create_data2['hugo_num']
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 1
    # NOTE(review): placeholder assertion — fill in the real capacity error
    # message once known
    assert resp.json()['err_desc'] == ''

    # check capacity on main catalog
    url = conn + '/read-activity-list'
    params = {'ref_date': datetime.date.today().strftime('%Y-%m-%d')}
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    for item in resp.json()['data']:
        if item['activity']['libelle'] == 'PUBLIK Vitrail Fusing 1/2 Je Adultes 2022/2023 - Mardi 14h-1':
            # TODO(review): assert the remaining-capacity fields of this
            # catalog entry; the original code dropped into the debugger here.
            pass
|
|
@ -0,0 +1,322 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
def test_catalog_personnalise_extrasco(extrasco_subscribe_info):
    """Check the personalised EXTRASCO catalog payload for the target activity."""
    info = extrasco_subscribe_info['info']
    assert (
        info['activity']['libelle1'] == 'PUBLIK ADL ELEMENTAIRE Maourine JUIN 22/23(NE PAS UTILISER)'
    )
    # no calendar generation needed for this activity
    assert info['calendarGeneration']['code'] == 'NOT_REQUIRED'
    # billed per attendance, no fixed quantity
    assert info['billingInformation'] == {
        'modeFact': 'PRESENCE',
        'quantity': None,
        'unitPrice': 11.5,
    }
    expected_note = (
        'Lien vers le réglement intérieur :\r\n'
        'https://portail-parsifal.test.entrouvert.org/media/uploads/2023/03/23/flyer-sejour.pdf\r\n'
        'Lien vers arrêté municipal :\r\n'
        'https://portail-parsifal.test.entrouvert.org/media/uploads/2023/04/05/arrete-municipal.pdf'
    )
    assert info['activity']['blocNoteList'] == [{'note': expected_note, 'numIndex': 1}]
    assert info['agenda'][0]['details']['activity_label'] == 'ADL ELEMENTAIRE Maourine Juin'
|
||||
|
||||
|
||||
def test_catalog_personnalise_extrasco2(extrasco_subscribe_info2):
    """Check the personalised EXTRASCO catalog payload for the second activity."""
    info = extrasco_subscribe_info2['info']
    assert (
        info['activity']['libelle1'] == 'PUBLIK ADL MATERNELLE Lardenne JUIN 22/23 (NEPAS UTILISER)'
    )
    # calendar generation is not allowed for this activity
    assert info['calendarGeneration']['code'] == 'FORBIDDEN'
    # billed per attendance, no fixed quantity
    assert info['billingInformation'] == {
        'modeFact': 'PRESENCE',
        'quantity': None,
        'unitPrice': 11.5,
    }
    expected_note = (
        'Lien vers le réglement intérieur :\r\n'
        'https://portail-parsifal.test.entrouvert.org/media/uploads/2023/03/23/flyer-sejour.pdf\r\n'
        'Lien vers arrêté municipal :\r\n'
        'https://portail-parsifal.test.entrouvert.org/media/uploads/2023/04/05/arrete-municipal.pdf'
    )
    assert info['activity']['blocNoteList'] == [{'note': expected_note, 'numIndex': 1}]
|
||||
|
||||
|
||||
def test_direct_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):
    """Subscribe a child directly (no basket) to the EXTRASCO activity."""
    info = extrasco_subscribe_info
    assert info['info']['controlResult']['controlOK'] is True

    resp = requests.post(
        conn + '/add-person-subscription?NameID=%s' % create_data['name_id'],
        json={
            'person_id': create_data['hugo_num'],
            'activity_id': info['activity']['id'],
            'unit_id': info['unit']['id'],
            'place_id': info['place']['id'],
            'start_date': info['unit']['dateStart'][:10],
            'end_date': info['unit']['dateEnd'][:10],
        },
    )
    resp.raise_for_status()
    body = resp.json()
    assert body['err'] == 0

    # no idIns provided to remove subscription later
    assert body['data'] == {'controlOK': True, 'message': None}
|
||||
|
||||
|
||||
def test_subscribe_with_conveyance(conn, create_data, extrasco_subscribe_info):
    """Basket subscription with morning/afternoon conveyance deposit places."""
    assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True

    conveyance = extrasco_subscribe_info['info']['conveyance']
    assert conveyance is not None
    morning = [place['id'] for place in conveyance['morningJourney']['depositPlaceList']]
    afternoon = [place['id'] for place in conveyance['afternoonJourney']['depositPlaceList']]
    # both journeys must propose at least one deposit place
    assert len(morning) > 0
    assert len(afternoon) > 0

    subscribe_url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
    resp = requests.post(
        subscribe_url,
        json={
            'person_id': create_data['bart_num'],
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'unit_id': extrasco_subscribe_info['unit']['id'],
            'place_id': extrasco_subscribe_info['place']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
            'conveyanceSubscribe/idPlaceMorning': morning[0],
            'conveyanceSubscribe/idPlaceAfternoon': afternoon[0],
        },
    )
    resp.raise_for_status()
    body = resp.json()
    assert body['err'] == 0
    basket_id = body['data']['basket']['id']

    # remove subscription
    delete_url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    resp = requests.post(delete_url, json={'basket_id': basket_id})
    resp.raise_for_status()
    assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
def test_subscribe_with_recurrent_week(conn, create_data, extrasco_subscribe_info):
    """Subscribe with a recurrent-week pattern and verify the resulting bookings.

    Scenario: subscribe Bart with Mondays/Tuesdays recurring, check that the
    agenda now has prefilled bookings and that the basket line carries a
    quantity and amount, then delete the basket to clean up.
    """
    assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
    # the activity proposes one recurrent slot per weekday
    assert [(x['id'], x['day']) for x in extrasco_subscribe_info['info']['recurrent_week']] == [
        ('1-X', 'Lundi'),
        ('2-X', 'Mardi'),
        ('3-X', 'Mercredi'),
        ('4-X', 'Jeudi'),
        ('5-X', 'Vendredi'),
    ]

    # subscribe Bart with Monday and Tuesday recurring over the unit period
    url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'unit_id': extrasco_subscribe_info['unit']['id'],
        'place_id': extrasco_subscribe_info['place']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'recurrent_week': ['1-X', '2-X'],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    basket_id = resp.json()['data']['basket']['id']

    # there is now some bookings
    url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
    params = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
    }
    resp = requests.get(url, params=params)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert any(x['prefill'] for x in resp.json()['data'])

    # check quantity into basket
    url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    line = resp.json()['data'][0]['lignes'][0]
    # amount is unit price times the number of booked slots
    assert line['prixUnit'] == 11.5
    assert line['qte'] > 0
    assert line['montant'] == line['prixUnit'] * line['qte']

    # remove subscription
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
def test_subscribe_with_agenda(conn, create_data, extrasco_subscribe_info):
    """Subscribe without a calendar, then book/unbook individual agenda slots.

    Scenario: basket subscription with no calendar (no bookings created), book
    two slots through update-activity-agenda, check they show up in both the
    activity agenda and the perisco agenda and that the basket amount stays at
    zero (PRESENCE billing is invoiced later), unbook everything, then delete
    the basket to clean up.
    """
    assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_bookings():
        # activity agenda entries for Bart over the unit period
        url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
        params = {
            'person_id': create_data['bart_num'],
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def get_perisco_bookings():
        # child agenda entries filtered down to this activity
        url = conn + '/read-child-agenda?NameID=%s' % create_data['name_id']
        params = {
            'child_id': create_data['bart_num'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return [
            item
            for item in resp.json()['data']
            if item['details']['activity_id'] == extrasco_subscribe_info['activity']['id']
        ]

    # subscribe without providing calendar
    url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'unit_id': extrasco_subscribe_info['unit']['id'],
        'place_id': extrasco_subscribe_info['place']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    basket_id = resp.json()['data']['basket']['id']

    # no booking
    assert not any(x['prefill'] for x in get_bookings())
    assert not any(x['prefill'] for x in get_perisco_bookings())

    # book using info calendar gabarit (booking registered from w.c.s. form)
    assert len(extrasco_subscribe_info['info']['agenda']) > 0
    assert not any(x['prefill'] for x in extrasco_subscribe_info['info']['agenda'])
    # book the first and last open (not disabled) slots
    slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert [x['booked'] for x in resp.json()['changes']] == [True, True]

    # there is now 2 bookings
    assert len([x['prefill'] for x in get_bookings() if x['prefill'] is True]) == 2
    perisco_bookings = get_perisco_bookings()
    assert len([x['prefill'] for x in perisco_bookings if x['prefill'] is True]) == 2
    assert perisco_bookings[0]['details']['activity_label'] == 'ADL ELEMENTAIRE Maourine Juin'

    # check quantity into basket
    url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
    resp = requests.get(url)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    line = resp.json()['data'][0]['lignes'][0]
    # bookings do not change the basket amount here (qte and montant stay 0)
    assert (line['prixUnit'], line['qte'], line['montant']) == (11.5, 0.0, 0.0)

    # unbook slots
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert [x['booked'] for x in resp.json()['changes']] == [False, False]
    assert not any(x['prefill'] for x in get_bookings())

    # remove subscription
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
def test_daily_capacity(conn, create_data2, extrasco_subscribe_info3):
    """Bookings beyond the activity's daily capacity must be refused.

    Scenario: subscribe all four children, book three of them on the same day
    (slot), then check that booking the fourth (Hugo) fails.
    """
    assert extrasco_subscribe_info3['info']['controlResult']['controlOK'] is True

    def subscribe(child):
        # basket-subscribe one child; returns the (shared) basket id
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data2['name_id']
        payload = {
            'person_id': create_data2['%s_num' % child],
            'activity_id': extrasco_subscribe_info3['activity']['id'],
            'unit_id': extrasco_subscribe_info3['unit']['id'],
            'place_id': extrasco_subscribe_info3['place']['id'],
            'start_date': extrasco_subscribe_info3['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info3['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']['basket']['id']

    def book(child, slot):
        # book a single agenda slot for one child; returns the raw response
        url = conn + '/update-activity-agenda/?NameID=%s' % create_data2['name_id']
        payload = {
            'person_id': create_data2['%s_num' % child],
            'activity_id': extrasco_subscribe_info3['activity']['id'],
            'start_date': extrasco_subscribe_info3['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info3['unit']['dateEnd'][:10],
            'booking_list': [slot],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    # subscribe all family childs
    basket_id = subscribe('bart')
    for child in 'lisa', 'maggie', 'hugo':
        assert subscribe(child) == basket_id

    # book all childs on the same day
    assert len(extrasco_subscribe_info3['info']['agenda']) > 0
    assert not any(x['prefill'] for x in extrasco_subscribe_info3['info']['agenda'])
    slots = [x['id'] for x in extrasco_subscribe_info3['info']['agenda'] if x['disabled'] is False]
    for child in 'bart', 'lisa', 'maggie':
        resp = book(child, slots[-1])
        assert resp.json()['err'] == 0
        assert resp.json()['updated'] is True
        assert [x['booked'] for x in resp.json()['changes']] == [True]
    resp = book('hugo', slots[-1])
    assert resp.json()['err'] == 1
    # NOTE(review): comparing err_desc to 0 looks wrong — elsewhere in this
    # suite err_desc is a string; confirm the expected error message
    assert resp.json()['err_desc'] == 0

    # # remove subscriptions
    # url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    # payload = {'basket_id': basket_id}
    # resp = requests.post(url, json=payload)
    # resp.raise_for_status()
    # assert resp.json()['err'] == 0
|
|
@ -0,0 +1,557 @@
|
|||
import pytest
|
||||
import requests
|
||||
|
||||
|
||||
def test_basket_subscribe_extrasco(conn, create_data, extrasco_subscribe_info, reference_year):
    """Full basket life-cycle for an EXTRASCO subscription.

    Scenario: subscribe Bart and Maggie through the basket, check basket
    contents and subscription lists at each step, delete a single basket line,
    re-subscribe, add bookings for both children, then delete the whole basket
    and verify everything is rolled back.
    """
    assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True

    def get_baskets():
        # current baskets of the family
        url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
        resp = requests.get(url)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def subscribe(person_id):
        # basket-subscribe one person; returns the raw response
        url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
        payload = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'unit_id': extrasco_subscribe_info['unit']['id'],
            'place_id': extrasco_subscribe_info['place']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.post(url, json=payload)
        resp.raise_for_status()
        return resp

    def subscriptions(person_id):
        # EXTRASCO subscriptions of one person for the reference school year
        url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'nature': 'EXTRASCO',
            'school_year': '%s-%s' % (reference_year, reference_year + 1),
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    def get_bookings(person_id):
        # activity agenda entries of one person over the unit period
        url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
        params = {
            'person_id': person_id,
            'activity_id': extrasco_subscribe_info['activity']['id'],
            'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
            'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        }
        resp = requests.get(url, params=params)
        resp.raise_for_status()
        assert resp.json()['err'] == 0
        return resp.json()['data']

    # no subscription
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []

    # empty basket
    assert get_baskets() == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert data['basket']['codeRegie'] == 105
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len({x['idIns'] for x in data['basket']['lignes']}) == 1  # 3 sur Larden

    subs = subscriptions(create_data['bart_num'])
    assert len(subs) == 1
    assert len(subs[0]['subscribesUnit']) == 1
    assert subscriptions(create_data['maggie_num']) == []

    # basket
    data = get_baskets()
    assert len(data) == 1
    assert data[0]['codeRegie'] == 105
    assert data[0]['text'] == 'ENFANCE LOISIRS'
    assert len(data[0]['lignes']) == 1  # 3 sur Larden
    assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1

    # get 3 idIns because we subscribe a generic unit
    assert len({x['idIns'] for x in data[0]['lignes']}) == 1  # 3 sur Larden
    basket_id = data[0]['id']

    # cannot subscribe Bart twice
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 1
    assert 'E1019' in resp.json()['err_desc']
    assert len(get_baskets()) == 1

    # delete basket
    # should be call by user or by cron job
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'
    assert get_baskets() == []
    assert subscriptions(create_data['bart_num']) == []

    # subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
    assert len(subscriptions(create_data['bart_num'])) == 1

    # subscribe Maggie
    resp = subscribe(create_data['maggie_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    subs = subscriptions(create_data['maggie_num'])
    assert len(subs) == 1
    assert len(subs[0]['subscribesUnit']) == 1

    # delete (generic) basket line for Bart
    data = get_baskets()
    assert len(data) == 1
    assert len(data[0]['lignes']) == 2  # 6 sur Larden
    basket_id = data[0]['id']
    # line for the generic unit for Bart
    line_id = [
        y['id']
        for x in data
        for y in x['lignes']
        if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
        if y['inscription']['idUnit'] == extrasco_subscribe_info['unit']['id']
    ][0]
    url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
    payload = {
        'basket_id': basket_id,
        'line_id': line_id,
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['codeRegie'] == 105
    assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
    assert len({x['idIns'] for x in data['lignes']}) == 1  # 3 sur Larden
    data = get_baskets()
    assert len(data) == 1
    assert len(get_baskets()) == 1
    assert len(data[0]['lignes']) == 1  # 3 sur Larden
    assert subscriptions(create_data['bart_num']) == []
    assert len(subscriptions(create_data['maggie_num'])) == 1

    # re-subscribe Bart
    resp = subscribe(create_data['bart_num'])
    assert resp.json()['err'] == 0
    data = resp.json()['data']
    assert data['controlResult'] == {'controlOK': True, 'message': None}
    assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
    assert len(subscriptions(create_data['bart_num'])) == 1

    # add bookings to Bart
    slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['bart_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert [x['booked'] for x in resp.json()['changes']] == [True, True]
    assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2

    # add bookings to Maggie
    # rewrite the slot ids (person-prefixed) to target Maggie instead of Bart
    slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
    url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
    payload = {
        'person_id': create_data['maggie_num'],
        'activity_id': extrasco_subscribe_info['activity']['id'],
        'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
        'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
        'booking_list': [slots[0], slots[-1]],
    }
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['updated'] is True
    assert [x['booked'] for x in resp.json()['changes']] == [True, True]
    assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2

    # delete basket
    # should be call by user or by cron job
    url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
    payload = {'basket_id': basket_id}
    resp = requests.post(url, json=payload)
    resp.raise_for_status()
    assert resp.json()['err'] == 0
    assert resp.json()['data'] == 'ok'
    assert get_baskets() == []
    assert subscriptions(create_data['bart_num']) == []
    assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
|
||||
@pytest.mark.xfail(run=False)
|
||||
def test_basket_subscribe_extrasco2(conn, create_data, extrasco_subscribe_info2, reference_year):
|
||||
"""Subscribing to a generic unit"""
|
||||
assert extrasco_subscribe_info2['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info2['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info2['place']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1 # 3 expected
|
||||
|
||||
subs = subscriptions(create_data['bart_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 2
|
||||
assert [x['libelle'] for x in subs[0]['subscribesUnit']] == [
|
||||
'PUBLIK ADL MATERNELLE Lardenne JUIN 22/23 (NEPAS UTILISER)',
|
||||
'PUBLIK ADL MATER JOURNEE AVEC REPAS',
|
||||
]
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert data[0]['text'] == 'ENFANCE LOISIRS'
|
||||
assert len(data[0]['lignes']) == 1 # 3 expected
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
# we should get 3 idIns because we subscribe a generic unit
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1 # 3 expected
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# delete basket
|
||||
# should be call by user or by cron job
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# delete (generic) basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 2 # 6 sur Larden
|
||||
basket_id = data[0]['id']
|
||||
# line for the generic unit for Bart
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
if y['inscription']['idUnit'] == extrasco_subscribe_info2['unit']['id']
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 1 # 3 sur Larden
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 1 # 3 sur Larden
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# re-subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info2['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info2['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info2['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info2['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert [x['booked'] for x in resp.json()['changes']] == [True, True]
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) == 2
|
||||
|
||||
# delete basket
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
|
||||
def test_basket_subscribe_loisirs(conn, create_data, loisirs_subscribe_info, reference_year):
|
||||
assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': loisirs_subscribe_info['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info['place']['id'],
|
||||
'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'LOISIRS',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return [
|
||||
x
|
||||
for x in resp.json()['data']
|
||||
if x['libelle'] == 'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES'
|
||||
]
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 109
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1
|
||||
subs = subscriptions(create_data['bart_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 2
|
||||
assert [x['libelle'] for x in subs[0]['subscribesUnit']] == [
|
||||
'TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES',
|
||||
'MERCREDI - 15h30/17h - 8/15Ans',
|
||||
]
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 109
|
||||
assert data[0]['text'] == 'SPORT'
|
||||
assert len(data[0]['lignes']) == 1
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1
|
||||
assert data[0]['lignes'][0]['montant'] == 88.5
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# cannot subscribe Bart twice
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 1
|
||||
assert 'E1019' in resp.json()['err_desc']
|
||||
assert len(get_baskets()) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
subs = subscriptions(create_data['maggie_num'])
|
||||
assert len(subs) == 1
|
||||
assert len(subs[0]['subscribesUnit']) == 2
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['id'] == basket_id
|
||||
assert data[0]['codeRegie'] == 109
|
||||
assert data[0]['text'] == 'SPORT'
|
||||
assert len(data[0]['lignes']) == 2
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 2
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 2
|
||||
assert all(x['montant'] == 88.5 for x in data[0]['lignes'])
|
||||
|
||||
# delete basket line for Bart
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(data[0]['lignes']) == 2
|
||||
basket_id = data[0]['id']
|
||||
# line for Bart
|
||||
line_id = [
|
||||
y['id']
|
||||
for x in data
|
||||
for y in x['lignes']
|
||||
if y['personneInfo']['numPerson'] == int(create_data['bart_num'])
|
||||
][0]
|
||||
url = conn + '/delete-basket-line?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'basket_id': basket_id,
|
||||
'line_id': line_id,
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['codeRegie'] == 109
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['lignes']}) == 1
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert len(get_baskets()) == 1
|
||||
assert len(data[0]['lignes']) == 1
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# delete basket
|
||||
url = conn + '/delete-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['data'] == 'ok'
|
||||
assert get_baskets() == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
|
@ -0,0 +1,346 @@
|
|||
import datetime
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from .conftest import diff, link, unlink
|
||||
|
||||
|
||||
def test_direct_debit_order(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'codeRegie': '102',
|
||||
'bank/bankBIC': 'BDFEFR2T',
|
||||
'bank/bankIBAN': 'FR7630001007941234567890185',
|
||||
'bank/bankRUM': 'xxx',
|
||||
'bank/dateStart': '2023-01-01',
|
||||
'bank/bankAddress': '75049 PARIS cedex 01',
|
||||
'bank/civility': 'x',
|
||||
'bank/lastName': 'Ewing',
|
||||
'bank/firstName': 'John Ross',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
url = conn + '/get-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'codeRegie': '102',
|
||||
'dateRef': '2023-01-01',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
res['data']['numPerson'] = 'N/A'
|
||||
assert diff(res['data'], 'test_get_rl1_direct_debit_order.json')
|
||||
|
||||
|
||||
def test_pay_invoice_loisirs(conn, create_data, loisirs_subscribe_info, reference_year):
|
||||
assert loisirs_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': loisirs_subscribe_info['activity']['id'],
|
||||
'unit_id': loisirs_subscribe_info['unit']['id'],
|
||||
'place_id': loisirs_subscribe_info['place']['id'],
|
||||
'start_date': loisirs_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': loisirs_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 109
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 109
|
||||
assert data[0]['text'] == 'SPORT'
|
||||
assert len(data[0]['lignes']) == 2
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 2
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 2
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# validate basket de generate an invoice
|
||||
url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data['idInsLst']) == 2
|
||||
assert len(data['factureLst']) == 1
|
||||
assert len(data['factureLst'][0]['lineInvoiceList']) == 2
|
||||
assert data['factureLst'][0]['regie']['code'] == 109
|
||||
invoice_num = data['factureLst'][0]['numInvoice']
|
||||
invoice_id = data['factureLst'][0]['idInvoice']
|
||||
assert get_baskets() == []
|
||||
|
||||
# get invoices paid
|
||||
url = conn + '/regie/109/invoices/history?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'data': [], 'err': 0}
|
||||
|
||||
# get invoices to be paid
|
||||
url = conn + '/regie/109/invoices?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data) == 1
|
||||
assert data[0]['amount'] == '177' # ou juste > 0 ?
|
||||
assert data[0]['online_payment'] is True
|
||||
assert data[0]['paid'] is False
|
||||
assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
|
||||
assert data[0]['maelis_item']['idInvoice'] == invoice_id
|
||||
assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
||||
|
||||
# payInvoice
|
||||
url = conn + '/regie/109/invoice/%s-%s/pay/' % (create_data['family_id'], invoice_num)
|
||||
payload = {
|
||||
'transaction_date': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
|
||||
'transaction_id': 'xxx',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
# get invoices to be paid
|
||||
url = conn + '/regie/109/invoices?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'has_invoice_for_payment': True, 'data': [], 'err': 0}
|
||||
|
||||
# get invoices paid
|
||||
url = conn + '/regie/109/invoices/history?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data) == 1
|
||||
assert data[0]['amount'] == '0'
|
||||
assert data[0]['total_amount'] == '177' # ou juste > 0 ?
|
||||
assert data[0]['online_payment'] is False
|
||||
assert data[0]['paid'] is True
|
||||
assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
|
||||
assert data[0]['maelis_item']['idInvoice'] == invoice_id
|
||||
assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
||||
|
||||
|
||||
def test_payinvoice_extrasco(conn, create_data, extrasco_subscribe_info, reference_year):
|
||||
assert extrasco_subscribe_info['info']['controlResult']['controlOK'] is True
|
||||
|
||||
def get_baskets():
|
||||
url = conn + '/get-baskets?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def subscribe(person_id):
|
||||
url = conn + '/add-person-basket-subscription?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'unit_id': extrasco_subscribe_info['unit']['id'],
|
||||
'place_id': extrasco_subscribe_info['place']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp
|
||||
|
||||
def subscriptions(person_id):
|
||||
url = conn + '/read-subscribe-activity-list?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'nature': 'EXTRASCO',
|
||||
'school_year': '%s-%s' % (reference_year, reference_year + 1),
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
def get_bookings(person_id):
|
||||
url = conn + '/read-activity-agenda?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'person_id': person_id,
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
return resp.json()['data']
|
||||
|
||||
# no subscription
|
||||
assert subscriptions(create_data['bart_num']) == []
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# empty basket
|
||||
assert get_baskets() == []
|
||||
|
||||
# subscribe Bart
|
||||
resp = subscribe(create_data['bart_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert data['basket']['codeRegie'] == 105
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
|
||||
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1
|
||||
|
||||
assert len(subscriptions(create_data['bart_num'])) == 1
|
||||
assert subscriptions(create_data['maggie_num']) == []
|
||||
|
||||
# basket
|
||||
data = get_baskets()
|
||||
assert len(data) == 1
|
||||
assert data[0]['codeRegie'] == 105
|
||||
assert len(data[0]['lignes']) == 1
|
||||
assert len({x['personneInfo']['numPerson'] for x in data[0]['lignes']}) == 1
|
||||
|
||||
assert len({x['idIns'] for x in data[0]['lignes']}) == 1
|
||||
basket_id = data[0]['id']
|
||||
|
||||
# subscribe Maggie
|
||||
resp = subscribe(create_data['maggie_num'])
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert data['controlResult'] == {'controlOK': True, 'message': None}
|
||||
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 2
|
||||
assert len(subscriptions(create_data['maggie_num'])) == 1
|
||||
|
||||
# add bookings to Bart
|
||||
slots = [x['id'] for x in extrasco_subscribe_info['info']['agenda'] if x['disabled'] is False]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['bart_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['bart_num']) if x['prefill'] is True]) > 0
|
||||
|
||||
# add bookings to Maggie
|
||||
slots = [':'.join([create_data['maggie_num']] + x.split(':')[1:]) for x in slots]
|
||||
url = conn + '/update-activity-agenda/?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'person_id': create_data['maggie_num'],
|
||||
'activity_id': extrasco_subscribe_info['activity']['id'],
|
||||
'start_date': extrasco_subscribe_info['unit']['dateStart'][:10],
|
||||
'end_date': extrasco_subscribe_info['unit']['dateEnd'][:10],
|
||||
'booking_list': [slots[0], slots[-1]],
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
assert resp.json()['updated'] is True
|
||||
assert len([x['prefill'] for x in get_bookings(create_data['maggie_num']) if x['prefill'] is True]) > 0
|
||||
|
||||
# validate basket
|
||||
url = conn + '/validate-basket?NameID=%s' % create_data['name_id']
|
||||
payload = {'basket_id': basket_id}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data['idInsLst']) == 2
|
||||
assert len(data['factureLst']) == 1
|
||||
assert get_baskets() == []
|
||||
assert len(data['factureLst'][0]['lineInvoiceList']) == 2
|
||||
assert data['factureLst'][0]['regie']['code'] == 105
|
||||
invoice_num = data['factureLst'][0]['numInvoice']
|
||||
invoice_id = data['factureLst'][0]['idInvoice']
|
||||
|
||||
# get invoices paid
|
||||
url = conn + '/regie/105/invoices/history?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'data': [], 'err': 0}
|
||||
|
||||
# get invoices to be paid
|
||||
url = conn + '/regie/105/invoices?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data) == 1
|
||||
assert int(data[0]['amount']) > 0
|
||||
assert data[0]['online_payment'] is True
|
||||
assert data[0]['paid'] is False
|
||||
assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
|
||||
assert data[0]['maelis_item']['idInvoice'] == invoice_id
|
||||
assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
||||
|
||||
# payInvoice
|
||||
url = conn + '/regie/105/invoice/%s-%s/pay/' % (create_data['family_id'], invoice_num)
|
||||
payload = {
|
||||
'transaction_date': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
|
||||
'transaction_id': 'xxx',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
# get invoices to be paid
|
||||
url = conn + '/regie/105/invoices?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json() == {'has_invoice_for_payment': True, 'data': [], 'err': 0}
|
||||
|
||||
# get invoices history
|
||||
url = conn + '/regie/105/invoices/history?NameID=%s' % create_data['name_id']
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
assert resp.json()['err'] == 0
|
||||
data = resp.json()['data']
|
||||
assert len(data) == 1
|
||||
assert data[0]['amount'] == '0'
|
||||
assert int(data[0]['total_amount']) > 0
|
||||
assert data[0]['online_payment'] is False
|
||||
assert data[0]['paid'] is True
|
||||
assert len({x['idIns'] for x in data[0]['maelis_item']['lineInvoiceList']}) == 2
|
||||
assert data[0]['maelis_item']['idInvoice'] == invoice_id
|
||||
assert data[0]['maelis_item']['numInvoice'] == invoice_num
|
|
@ -1,36 +0,0 @@
|
|||
import requests
|
||||
|
||||
from .conftest import diff, link, unlink
|
||||
|
||||
|
||||
def test_direct_debit_order(conn, create_data):
|
||||
unlink(conn, create_data['name_id'])
|
||||
link(conn, create_data)
|
||||
|
||||
url = conn + '/add-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
payload = {
|
||||
'codeRegie': '1',
|
||||
'bank/bankBIC': 'BDFEFR2T',
|
||||
'bank/bankIBAN': 'FR7630001007941234567890185',
|
||||
'bank/bankRUM': 'xxx',
|
||||
'bank/dateStart': '2023-01-01',
|
||||
'bank/bankAddress': '75049 PARIS cedex 01',
|
||||
'bank/civility': 'x',
|
||||
'bank/lastName': 'Ewing',
|
||||
'bank/firstName': 'John Ross',
|
||||
}
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
assert res['data'] == 'ok'
|
||||
|
||||
url = conn + '/get-rl1-direct-debit-order?NameID=%s' % create_data['name_id']
|
||||
params = {
|
||||
'codeRegie': '1',
|
||||
'dateRef': '2023-01-01',
|
||||
}
|
||||
resp = requests.get(url, params=params)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
res['data']['numPerson'] = 'N/A'
|
||||
assert diff(res['data'], 'test_get_rl1_direct_debit_order.json')
|
|
@ -2,9 +2,9 @@ import pytest
|
|||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption("--url", help="Url of a passerelle Vivaticket connector instance")
|
||||
parser.addoption('--url', help='Url of a passerelle Vivaticket connector instance')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def conn(request):
|
||||
return request.config.getoption("--url")
|
||||
return request.config.getoption('--url')
|
||||
|
|
|
@ -6,7 +6,7 @@ import requests
|
|||
|
||||
|
||||
def call_generic(conn, endpoint):
|
||||
print("%s \n" % endpoint)
|
||||
print('%s \n' % endpoint)
|
||||
url = conn + '/%s' % endpoint
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
@ -50,7 +50,7 @@ def test_book_event(conn):
|
|||
themes = call_generic(conn, 'themes')
|
||||
random.shuffle(themes)
|
||||
payload['theme'] = themes[0]['id']
|
||||
print("Creating booking with the following payload:\n%s" % payload)
|
||||
print('Creating booking with the following payload:\n%s' % payload)
|
||||
resp = requests.post(url, json=payload)
|
||||
resp.raise_for_status()
|
||||
res = resp.json()
|
||||
|
|
|
@ -1,4 +0,0 @@
|
|||
#!/bin/sh -ue
|
||||
|
||||
test -d wcs || git clone https://git.entrouvert.org/wcs.git
|
||||
(cd wcs && git pull)
|
|
@ -2,8 +2,8 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == "__main__":
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passerelle.settings")
|
||||
if __name__ == '__main__':
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'passerelle.settings')
|
||||
|
||||
from django.core.management import execute_from_command_line
|
||||
|
||||
|
|
|
@ -102,6 +102,7 @@ class AddressResource(BaseResource):
|
|||
@endpoint(
|
||||
name='sectors',
|
||||
description=_('List related Sectorizations'),
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'id': {'description': _('Sector Identifier (slug)')},
|
||||
'q': {'description': _('Filter by Sector Title or Identifier')},
|
||||
|
|
|
@ -2,7 +2,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0006_resourcestatus'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('actesweb', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -48,7 +48,7 @@ class ActesWeb(BaseResource):
|
|||
def basepath(self):
|
||||
return os.path.join(default_storage.path('actesweb'), self.slug)
|
||||
|
||||
@endpoint(perm='can_access', methods=['post'], description=_('Create demand'))
|
||||
@endpoint(methods=['post'], description=_('Create demand'))
|
||||
def create(self, request, *args, **kwargs):
|
||||
try:
|
||||
payload = json.loads(request.body)
|
||||
|
|
|
@ -0,0 +1,77 @@
|
|||
# Generated by Django 3.2.18 on 2023-07-07 10:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AdullactPastell',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'basic_auth_username',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication username'
|
||||
),
|
||||
),
|
||||
(
|
||||
'basic_auth_password',
|
||||
models.CharField(
|
||||
blank=True, max_length=128, verbose_name='Basic authentication password'
|
||||
),
|
||||
),
|
||||
(
|
||||
'client_certificate',
|
||||
models.FileField(
|
||||
blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
|
||||
),
|
||||
),
|
||||
(
|
||||
'trusted_certificate_authorities',
|
||||
models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS trusted CAs'),
|
||||
),
|
||||
(
|
||||
'verify_cert',
|
||||
models.BooleanField(blank=True, default=True, verbose_name='TLS verify certificates'),
|
||||
),
|
||||
(
|
||||
'http_proxy',
|
||||
models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
|
||||
),
|
||||
(
|
||||
'api_base_url',
|
||||
models.URLField(
|
||||
help_text='Example: https://pastell.example.com/api/v2/',
|
||||
max_length=128,
|
||||
verbose_name='API base URL',
|
||||
),
|
||||
),
|
||||
('token', models.CharField(blank=True, max_length=128, verbose_name='API token')),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_adullact_pastell_adullactpastell_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Adullact Pastell',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -0,0 +1,265 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import base64
|
||||
from urllib import parse as urlparse
|
||||
|
||||
import requests
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from django.http import HttpResponse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource, HTTPResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
FILE_OBJECT_PROPERTIES = {
|
||||
'title': _('File object'),
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename'),
|
||||
},
|
||||
'content': {
|
||||
'type': 'string',
|
||||
'description': _('Content'),
|
||||
},
|
||||
'content_type': {
|
||||
'type': 'string',
|
||||
'description': _('Content type'),
|
||||
},
|
||||
},
|
||||
'required': ['filename', 'content'],
|
||||
}
|
||||
|
||||
|
||||
DOCUMENT_CREATION_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['type'],
|
||||
'additionalProperties': True,
|
||||
'properties': {
|
||||
'type': {'type': 'string', 'description': _('Document type')},
|
||||
'file_field_name': {
|
||||
'type': 'string',
|
||||
'description': _('Document file\'s field name'),
|
||||
},
|
||||
'file': FILE_OBJECT_PROPERTIES,
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename (takes precedence over filename in "file" object)'),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
DOCUMENT_FILE_UPLOAD_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['file', 'file_field_name'],
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'filename': {
|
||||
'type': 'string',
|
||||
'description': _('Filename (takes precedence over filename in "file" object)'),
|
||||
},
|
||||
'file': FILE_OBJECT_PROPERTIES,
|
||||
'file_field_name': {
|
||||
'type': 'string',
|
||||
'description': _('Document file\'s field name'),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class AdullactPastell(BaseResource, HTTPResource):
|
||||
api_base_url = models.URLField(
|
||||
max_length=128,
|
||||
verbose_name=_('API base URL'),
|
||||
help_text=_('Example: https://pastell.example.com/api/v2/'),
|
||||
)
|
||||
token = models.CharField(max_length=128, blank=True, verbose_name=_('API token'))
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Adullact Pastell')
|
||||
|
||||
def clean(self, *args, **kwargs):
|
||||
if not self.token and not self.basic_auth_username:
|
||||
raise ValidationError(_('API token or authentication username and password should be defined.'))
|
||||
return super().clean(*args, **kwargs)
|
||||
|
||||
def call(self, path, method='get', params=None, **kwargs):
|
||||
url = urlparse.urljoin(self.api_base_url, path)
|
||||
if self.token:
|
||||
kwargs.update({'headers': {'Authorization': 'Bearer: %s' % self.token}, 'auth': None})
|
||||
try:
|
||||
response = self.requests.request(url=url, method=method, params=params, **kwargs)
|
||||
response.raise_for_status()
|
||||
except (requests.Timeout, requests.RequestException) as e:
|
||||
raise APIError(str(e))
|
||||
return response
|
||||
|
||||
def check_status(self):
|
||||
try:
|
||||
response = self.call('version')
|
||||
except APIError as e:
|
||||
raise Exception('Pastell server is down: %s' % e)
|
||||
return {'data': response.json()}
|
||||
|
||||
def upload_file(self, entity_id, document_id, file_field_name, data, **kwargs):
|
||||
filename = kwargs.get('filename') or data['filename']
|
||||
file_data = {
|
||||
'file_content': (
|
||||
filename,
|
||||
base64.b64decode(data['content']),
|
||||
data.get('content_type'),
|
||||
)
|
||||
}
|
||||
|
||||
return self.call(
|
||||
'entite/%s/document/%s/file/%s' % (entity_id, document_id, file_field_name),
|
||||
'post',
|
||||
files=file_data,
|
||||
data={'file_name': filename},
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
description=_('List entities'),
|
||||
datasource=True,
|
||||
)
|
||||
def entities(self, request):
|
||||
data = []
|
||||
response = self.call('entite')
|
||||
for item in response.json():
|
||||
item['id'] = item['id_e']
|
||||
item['text'] = item['denomination']
|
||||
data.append(item)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
description=_('List entity documents'),
|
||||
parameters={'entity_id': {'description': _('Entity ID'), 'example_value': '42'}},
|
||||
datasource=True,
|
||||
)
|
||||
def documents(self, request, entity_id):
|
||||
if request.GET.get('id'):
|
||||
response = self.call('entite/%s/document/%s' % (entity_id, request.GET['id']))
|
||||
return {'data': response.json()}
|
||||
|
||||
data = []
|
||||
response = self.call('entite/%s/document' % entity_id)
|
||||
for item in response.json():
|
||||
item['id'] = item['id_d']
|
||||
item['text'] = item['titre']
|
||||
data.append(item)
|
||||
return {'data': data}
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Create a document for an entity'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_CREATION_SCHEMA}},
|
||||
},
|
||||
name='create-document',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
},
|
||||
)
|
||||
def create_document(self, request, entity_id, post_data):
|
||||
file_data = post_data.pop('file', None)
|
||||
file_field_name = post_data.pop('file_field_name', None)
|
||||
|
||||
# create document
|
||||
response = self.call('entite/%s/document' % entity_id, 'post', params=post_data)
|
||||
document_id = response.json()['id_d']
|
||||
|
||||
# update it with other attributes
|
||||
response = self.call('entite/%s/document/%s' % (entity_id, document_id), 'patch', params=post_data)
|
||||
|
||||
# upload file if it's filled
|
||||
if file_field_name and file_data:
|
||||
self.upload_file(entity_id, document_id, file_field_name, file_data, **post_data)
|
||||
|
||||
return {'data': response.json()}
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Upload a file to a document'),
|
||||
'request_body': {'schema': {'application/json': DOCUMENT_FILE_UPLOAD_SCHEMA}},
|
||||
},
|
||||
name='upload-document-file',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
},
|
||||
)
|
||||
def upload_document_file(self, request, entity_id, document_id, post_data):
|
||||
file_field_name = post_data.pop('file_field_name')
|
||||
file_data = post_data.pop('file')
|
||||
response = self.upload_file(entity_id, document_id, file_field_name, file_data, **post_data)
|
||||
return {'data': response.json()}
|
||||
|
||||
@endpoint(
|
||||
description=_('Get document\'s file'),
|
||||
name='get-document-file',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
'field_name': {
|
||||
'description': _('Document file\'s field name'),
|
||||
'example_value': 'document',
|
||||
},
|
||||
},
|
||||
)
|
||||
def get_document_file(self, request, entity_id, document_id, field_name):
|
||||
document = self.call('entite/%s/document/%s/file/%s' % (entity_id, document_id, field_name))
|
||||
response = HttpResponse(document.content, content_type=document.headers['Content-Type'])
|
||||
response['Content-Disposition'] = document.headers['Content-disposition']
|
||||
return response
|
||||
|
||||
@endpoint(
|
||||
post={
|
||||
'description': _('Run action on document'),
|
||||
'request_body': {
|
||||
'schema': {
|
||||
'application/json': {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'type': 'object',
|
||||
'required': ['action_name'],
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'action_name': {'type': 'string', 'description': _('Action name')},
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
name='run-document-action',
|
||||
parameters={
|
||||
'entity_id': {'description': _('Entity ID'), 'example_value': '42'},
|
||||
'document_id': {'description': _('Document ID'), 'example_value': 'hDWtdSC'},
|
||||
},
|
||||
)
|
||||
def run_document_action(self, request, entity_id, document_id, post_data):
|
||||
response = self.call(
|
||||
'entite/%s/document/%s/action/%s' % (entity_id, document_id, post_data['action_name']), 'post'
|
||||
)
|
||||
return {'data': response.json()}
|
|
@ -2,7 +2,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0005_resourcelog'),
|
||||
]
|
||||
|
|
|
@ -2,7 +2,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('airquality', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('airquality', '0002_auto_20170920_0951'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('airquality', '0003_remove_airquality_log_level'),
|
||||
]
|
||||
|
|
|
@ -44,6 +44,7 @@ class AirQuality(BaseResource):
|
|||
@endpoint(
|
||||
pattern=r'^(?P<country>\w+)/(?P<city>\w+)/$',
|
||||
example_pattern='{country}/{city}/',
|
||||
perm='OPEN',
|
||||
parameters={
|
||||
'country': {'description': _('Country Code'), 'example_value': 'fr'},
|
||||
'city': {'description': _('City Name'), 'example_value': 'lyon'},
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_entreprise', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -14,7 +14,6 @@ def remove_url_path(apps, schema_editor):
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_entreprise', '0002_auto_20190701_1357'),
|
||||
]
|
||||
|
|
|
@ -185,7 +185,6 @@ class APIEntreprise(BaseResource):
|
|||
METHOD_PARAM = {'description': _('method used for user identity matching'), 'example_value': 'simple'}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<association_id>\w+)/$',
|
||||
example_pattern='{association_id}/',
|
||||
description=_('Get association\'s documents'),
|
||||
|
@ -289,7 +288,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': document}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s data from Infogreffe'),
|
||||
|
@ -305,7 +303,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': raw_data['data']}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<association_id>\w+)/$',
|
||||
example_pattern='{association_id}/',
|
||||
description=_('Get association\'s related informations'),
|
||||
|
@ -324,7 +321,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s related informations'),
|
||||
|
@ -375,6 +371,7 @@ class APIEntreprise(BaseResource):
|
|||
'v3/infogreffe/rcs/unites_legales/%s/mandataires_sociaux' % siren, raw=True, **kwargs
|
||||
).get('data')
|
||||
for mandataire in mandataires_data:
|
||||
mandataire = mandataire.get('data', {})
|
||||
for key in ('nom', 'prenom', 'fonction'):
|
||||
if key not in mandataire:
|
||||
mandataire[key] = ''
|
||||
|
@ -384,7 +381,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': {'entreprise': data, 'etablissement_siege': siege_data}}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
methods=['get'],
|
||||
pattern=r'(?P<siret>\w+)/$',
|
||||
example_pattern='{siret}/',
|
||||
|
@ -419,7 +415,6 @@ class APIEntreprise(BaseResource):
|
|||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
methods=['get'],
|
||||
pattern=r'(?P<siret>\w+)/$',
|
||||
example_pattern='{siret}/',
|
||||
|
@ -432,67 +427,9 @@ class APIEntreprise(BaseResource):
|
|||
},
|
||||
)
|
||||
def exercices(self, request, siret, **kwargs):
|
||||
return self.get('v2/exercices/%s/' % siret, **kwargs)
|
||||
return self.get('v3/dgfip/etablissements/%s/chiffres_affaires' % siret, raw=True, **kwargs)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
example_pattern='{siren}/',
|
||||
description=_('Get firm\'s annual workforce data'),
|
||||
parameters={
|
||||
'siren': SIREN_PARAM,
|
||||
'object': OBJECT_PARAM,
|
||||
'context': CONTEXT_PARAM,
|
||||
'recipient': RECIPIENT_PARAM,
|
||||
},
|
||||
)
|
||||
def effectifs_annuels_acoss_covid(self, request, siren, **kwargs):
|
||||
if len(siren) != 9:
|
||||
raise APIError(_('invalid SIREN length (must be 9 characters)'))
|
||||
return self.get('v2/effectifs_annuels_acoss_covid/%s/' % siren, **kwargs)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<year>\w+)/(?P<month>\w+)/(?P<siren>\w+)/$',
|
||||
description=_('Get firm\'s monthly workforce data, by SIREN'),
|
||||
parameters={
|
||||
'year': YEAR_PARAM,
|
||||
'month': MONTH_PARAM,
|
||||
'siren': SIREN_PARAM,
|
||||
'object': OBJECT_PARAM,
|
||||
'context': CONTEXT_PARAM,
|
||||
'recipient': RECIPIENT_PARAM,
|
||||
},
|
||||
)
|
||||
def entreprise_effectifs_mensuels_acoss_covid(self, request, year, month, siren, **kwargs):
|
||||
if len(siren) != 9:
|
||||
raise APIError(_('invalid SIREN length (must be 9 characters)'))
|
||||
month = month.zfill(2)
|
||||
return self.get(
|
||||
'v2/effectifs_mensuels_acoss_covid/%s/%s/entreprise/%s/' % (year, month, siren), **kwargs
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<year>\w+)/(?P<month>\w+)/(?P<siret>\w+)/$',
|
||||
description=_('Get firm\'s monthly workforce data, by SIRET'),
|
||||
parameters={
|
||||
'year': YEAR_PARAM,
|
||||
'month': MONTH_PARAM,
|
||||
'siret': SIRET_PARAM,
|
||||
'object': OBJECT_PARAM,
|
||||
'context': CONTEXT_PARAM,
|
||||
'recipient': RECIPIENT_PARAM,
|
||||
},
|
||||
)
|
||||
def etablissement_effectifs_mensuels_acoss_covid(self, request, year, month, siret, **kwargs):
|
||||
month = month.zfill(2)
|
||||
return self.get(
|
||||
'v2/effectifs_mensuels_acoss_covid/%s/%s/etablissement/%s/' % (year, month, siret), **kwargs
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
pattern=r'(?P<siren>\w+)/$',
|
||||
description=_(
|
||||
'Match firm\'s society representative against local FranceConnect identity information'
|
||||
|
@ -511,18 +448,18 @@ class APIEntreprise(BaseResource):
|
|||
def match_mandataire_social(
|
||||
self, request, siren, first_name, last_name, birthdate, method='simple', **kwargs
|
||||
):
|
||||
entreprise = self.get(
|
||||
'v2/entreprises/%s/' % siren,
|
||||
raw=True,
|
||||
**kwargs,
|
||||
)
|
||||
mandataires = self.get(
|
||||
'v3/infogreffe/rcs/unites_legales/%s/mandataires_sociaux' % siren, raw=True, **kwargs
|
||||
).get('data', [])
|
||||
|
||||
methods = {
|
||||
'simple': simple_match,
|
||||
'levenshtein': levenshtein_match,
|
||||
}
|
||||
if method not in methods:
|
||||
return {'err': 1, 'err_desc': 'method %s not implemented' % method}
|
||||
for mandataire in entreprise.get('entreprise', {}).get('mandataires_sociaux', []):
|
||||
for mandataire in mandataires:
|
||||
mandataire = mandataire.get('data', {})
|
||||
if methods[method](mandataire, first_name, last_name, birthdate):
|
||||
return {'err': 0, 'data': mandataire}
|
||||
return {'err': 0, 'data': {}}
|
||||
|
|
|
@ -27,7 +27,7 @@ def normalize(s):
|
|||
def simple_match(mandataire, first_name, last_name, birthdate):
|
||||
if any([attr not in mandataire for attr in ['prenom', 'nom', 'date_naissance']]):
|
||||
return False
|
||||
if normalize(mandataire['prenom'].split(',')[0]) != normalize(first_name):
|
||||
if normalize(mandataire['prenom'].replace(',', ' ').split(maxsplit=1)[0]) != normalize(first_name):
|
||||
return False
|
||||
if normalize(mandataire['nom']) != normalize(last_name):
|
||||
return False
|
||||
|
@ -41,7 +41,12 @@ def levenshtein_match(mandataire, first_name, last_name, birthdate):
|
|||
return False
|
||||
dist_first_name = min(2, int(len(first_name) / 4))
|
||||
dist_last_name = min(2, int(len(last_name) / 4))
|
||||
if ldistance(normalize(mandataire['prenom'].split(',')[0]), normalize(first_name)) > dist_first_name:
|
||||
if (
|
||||
ldistance(
|
||||
normalize(mandataire['prenom'].replace(',', ' ').split(maxsplit=1)[0]), normalize(first_name)
|
||||
)
|
||||
> dist_first_name
|
||||
):
|
||||
return False
|
||||
if ldistance(normalize(mandataire['nom']), normalize(last_name)) > dist_last_name:
|
||||
return False
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
# Generated by Django 3.2.18 on 2023-04-14 17:35
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('base', '0030_resourcelog_base_resour_appname_298cbc_idx'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Resource',
|
||||
fields=[
|
||||
(
|
||||
'id',
|
||||
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
|
||||
),
|
||||
('title', models.CharField(max_length=50, verbose_name='Title')),
|
||||
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
|
||||
('description', models.TextField(verbose_name='Description')),
|
||||
(
|
||||
'api_url',
|
||||
models.URLField(
|
||||
default='https://gw.dgfip.finances.gouv.fr/impotparticulier/1.0',
|
||||
max_length=256,
|
||||
verbose_name='DGFIP API base URL',
|
||||
),
|
||||
),
|
||||
('oauth_username', models.CharField(max_length=128, verbose_name='DGFIP API Username')),
|
||||
('oauth_password', models.CharField(max_length=128, verbose_name='DGFIP API Password')),
|
||||
(
|
||||
'oauth_scopes',
|
||||
models.CharField(max_length=128, verbose_name='DGFIP API Scopes', blank=True),
|
||||
),
|
||||
(
|
||||
'id_teleservice',
|
||||
models.TextField(max_length=128, verbose_name='DGFIP API ID_Teleservice', blank=True),
|
||||
),
|
||||
(
|
||||
'users',
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name='_api_impot_particulier_resource_users_+',
|
||||
related_query_name='+',
|
||||
to='base.ApiUser',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'API Impot Particulier',
|
||||
},
|
||||
),
|
||||
]
|
|
@ -0,0 +1,22 @@
|
|||
# Generated by Django 3.2.18 on 2023-05-25 09:49
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('api_impot_particulier', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='resource',
|
||||
name='id_teleservice',
|
||||
field=models.TextField(max_length=128, verbose_name='DGFIP API ID_Teleservice'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='resource',
|
||||
name='oauth_scopes',
|
||||
field=models.CharField(max_length=128, verbose_name='DGFIP API Scopes'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,306 @@
|
|||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2023 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
import uuid
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import requests
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
from passerelle.utils.timeout import Timeout
|
||||
|
||||
|
||||
class ServiceIsDown(APIError):
|
||||
def __init__(self):
|
||||
super().__init__(_('API Impot Particulier service is unavailable'))
|
||||
|
||||
def __str__(self):
|
||||
if self.__context__:
|
||||
return f'{super().__str__()}: {self.__context__}'
|
||||
return super().__str__()
|
||||
|
||||
|
||||
class Resource(BaseResource):
|
||||
api_url = models.URLField(
|
||||
_('DGFIP API base URL'),
|
||||
max_length=256,
|
||||
default='https://gw.dgfip.finances.gouv.fr/impotparticulier/1.0',
|
||||
)
|
||||
oauth_username = models.CharField(_('DGFIP API Username'), max_length=128)
|
||||
oauth_password = models.CharField(_('DGFIP API Password'), max_length=128)
|
||||
oauth_scopes = models.CharField(_('DGFIP API Scopes'), max_length=128)
|
||||
id_teleservice = models.TextField(_('DGFIP API ID_Teleservice'), max_length=128)
|
||||
|
||||
log_requests_errors = False
|
||||
requests_timeout = 30
|
||||
requests_max_retries = {
|
||||
'total': 3,
|
||||
'backoff_factor': 0.5,
|
||||
'allowed_methods': ['GET', 'POST'],
|
||||
# retry after: 0.5, 1.5 and 3.5 seconds
|
||||
'status_forcelist': [413, 429, 503, 504],
|
||||
}
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('API Impot Particulier')
|
||||
|
||||
@classmethod
|
||||
def parse_numero_fiscal(cls, value):
|
||||
value = value.strip().replace(' ', '')
|
||||
if not (value and value.isascii() and value.isdigit()):
|
||||
raise APIError(_('invalid numero_fiscal'))
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def parse_annee_de_revenu(cls, value):
|
||||
try:
|
||||
value = int(value)
|
||||
except (TypeError, ValueError):
|
||||
raise APIError(_('invalid annee_de_revenu'))
|
||||
today = datetime.date.today()
|
||||
if not (0 < today.year - value < 10):
|
||||
raise APIError(_('invalid annee_de_revenu'))
|
||||
return value
|
||||
|
||||
@endpoint(
|
||||
name='spi-situations-ir-assiettes-annrev',
|
||||
description=_('Provides revenue tax situation for a specific year.'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
'description': _('Tax number of the person'),
|
||||
},
|
||||
'annee_de_revenu': {
|
||||
'description': _('Income year'),
|
||||
},
|
||||
},
|
||||
)
|
||||
def spi_situations_ir_assiettes_annrev(self, request, numero_fiscal, annee_de_revenu):
|
||||
numero_fiscal = self.parse_numero_fiscal(numero_fiscal)
|
||||
annee_de_revenu = self.parse_annee_de_revenu(annee_de_revenu)
|
||||
return {
|
||||
'data': self.get_spi_situations_ir_assiettes_annrev(
|
||||
numero_fiscal=numero_fiscal, annee_de_revenu=annee_de_revenu, timeout=Timeout(20)
|
||||
)
|
||||
}
|
||||
|
||||
def get_spi_situations_ir_assiettes_annrev(self, numero_fiscal, annee_de_revenu, timeout=None):
|
||||
return self.call(
|
||||
name='spi-situations-ir-assiettes-deuxans',
|
||||
endpoint_template='spi/{spi}/situations/ir/assiettes/annrev/{annrev}',
|
||||
timeout=timeout,
|
||||
spi=numero_fiscal,
|
||||
annrev=annee_de_revenu,
|
||||
accept='application/prs.dgfip.part.situations.ir.assiettes.v1+json',
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
name='spi-situations-th-assiettes-principale-annrev',
|
||||
description=_('Provides housing tax situation for a specific year.'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
'description': _('Tax number of the person'),
|
||||
},
|
||||
'annee_de_revenu': {
|
||||
'description': _('Income year'),
|
||||
},
|
||||
},
|
||||
)
|
||||
def spi_situations_th_assiettes_principale_annrev(self, request, numero_fiscal, annee_de_revenu):
|
||||
numero_fiscal = self.parse_numero_fiscal(numero_fiscal)
|
||||
annee_de_revenu = self.parse_annee_de_revenu(annee_de_revenu)
|
||||
return {
|
||||
'data': self.get_spi_situations_th_assiettes_principale_annrev(
|
||||
numero_fiscal=numero_fiscal, annee_de_revenu=annee_de_revenu, timeout=Timeout(20)
|
||||
)
|
||||
}
|
||||
|
||||
def get_spi_situations_th_assiettes_principale_annrev(self, numero_fiscal, annee_de_revenu, timeout=None):
|
||||
return self.call(
|
||||
name='spi-situations-th-assiettes-principale-deuxans',
|
||||
endpoint_template='spi/{spi}/situations/th/assiettes/principale/annrev/{annrev}',
|
||||
timeout=timeout,
|
||||
spi=numero_fiscal,
|
||||
annrev=annee_de_revenu,
|
||||
accept='application/prs.dgfip.part.situations.th.assiettes.v1+json',
|
||||
)
|
||||
|
||||
def call(self, name, endpoint_template, timeout=None, **kwargs):
|
||||
correlation_id = str(uuid.uuid4().hex)
|
||||
kwargs_formatted = ', '.join(f'{key}={value}' for key, value in kwargs.items())
|
||||
try:
|
||||
data = self.get_tax_data(
|
||||
session=self.requests,
|
||||
base_url=self.api_url,
|
||||
access_token=self._get_access_token(timeout=timeout),
|
||||
correlation_id=correlation_id,
|
||||
endpoint_template=endpoint_template,
|
||||
id_teleservice=self.id_teleservice,
|
||||
timeout=timeout,
|
||||
**kwargs,
|
||||
)
|
||||
except ServiceIsDown as e:
|
||||
self.logger.warning(
|
||||
'%s(%s) failed: %s',
|
||||
name,
|
||||
kwargs_formatted,
|
||||
e,
|
||||
extra={
|
||||
'correlation_id': correlation_id,
|
||||
'id_teleservice': self.id_teleservice,
|
||||
'kwargs': kwargs,
|
||||
},
|
||||
)
|
||||
raise
|
||||
else:
|
||||
self.logger.warning(
|
||||
'%s(%s) success',
|
||||
name,
|
||||
kwargs_formatted,
|
||||
extra={
|
||||
'data': data,
|
||||
'correlation_id': correlation_id,
|
||||
'id_teleservice': self.id_teleservice,
|
||||
'kwargs': kwargs,
|
||||
},
|
||||
)
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def get_tax_data(
|
||||
cls,
|
||||
session,
|
||||
base_url,
|
||||
access_token,
|
||||
correlation_id,
|
||||
endpoint_template,
|
||||
accept,
|
||||
id_teleservice=None,
|
||||
headers=None,
|
||||
timeout=None,
|
||||
**kwargs,
|
||||
):
|
||||
headers = {
|
||||
**(headers or {}),
|
||||
'Authorization': f'Bearer {access_token}',
|
||||
'X-Correlation-ID': correlation_id,
|
||||
'Accept': accept,
|
||||
}
|
||||
if id_teleservice:
|
||||
headers['ID_Teleservice'] = id_teleservice
|
||||
|
||||
endpoint = endpoint_template.format(**kwargs)
|
||||
if not base_url.endswith('/'):
|
||||
base_url += '/'
|
||||
url = urljoin(base_url, endpoint)
|
||||
|
||||
if timeout is not None:
|
||||
timeout = float(timeout)
|
||||
|
||||
# api-impot-particulier error reporting is byzantine, some errors are
|
||||
# accompanied by a 4xx code, some others with a 20x code, some have a
|
||||
# JSON content, other are only identified by a codeapp header on
|
||||
# the response
|
||||
try:
|
||||
response = session.get(url, headers=headers, timeout=timeout)
|
||||
response.raise_for_status()
|
||||
except requests.HTTPError:
|
||||
try:
|
||||
content = response.json()['erreur']
|
||||
except (ValueError, KeyError):
|
||||
try:
|
||||
raise APIError(
|
||||
'api-impot-particulier error', data={'codeapp': response.headers['codeapp']}
|
||||
)
|
||||
except KeyError:
|
||||
pass
|
||||
raise ServiceIsDown
|
||||
raise APIError('api-impot-particulier-error', data=content)
|
||||
except requests.RequestException:
|
||||
raise ServiceIsDown
|
||||
|
||||
if response.status_code != 200:
|
||||
try:
|
||||
content = response.json()['erreur']
|
||||
except (ValueError, KeyError):
|
||||
try:
|
||||
raise APIError(
|
||||
'api-impot-particulier-error', data={'codeapp': response.headers['codeapp']}
|
||||
)
|
||||
except KeyError:
|
||||
raise ServiceIsDown
|
||||
raise APIError('api-impot-particulier error', data=content)
|
||||
|
||||
try:
|
||||
response_data = response.json()
|
||||
except ValueError:
|
||||
raise ServiceIsDown
|
||||
return response_data
|
||||
|
||||
def _get_access_token(self, timeout=None):
|
||||
key = (
|
||||
'dgfip-at-'
|
||||
+ hashlib.sha256(
|
||||
f'{self.oauth_username}-{self.oauth_password}-{self.api_url}'.encode()
|
||||
).hexdigest()
|
||||
)
|
||||
|
||||
access_token = cache.get(key)
|
||||
if not access_token:
|
||||
access_token = self.get_access_token(
|
||||
session=self.requests,
|
||||
base_url=self.api_url,
|
||||
username=self.oauth_username,
|
||||
password=self.oauth_password,
|
||||
scope=self.oauth_scopes,
|
||||
timeout=timeout,
|
||||
)
|
||||
cache.set(key, access_token, 300)
|
||||
return access_token
|
||||
|
||||
@classmethod
|
||||
def get_access_token(cls, session, base_url, username, password, scope, timeout=None):
|
||||
data = {
|
||||
'grant_type': 'client_credentials',
|
||||
}
|
||||
if scope:
|
||||
data['scope'] = scope
|
||||
|
||||
url = urljoin(base_url, '/token')
|
||||
|
||||
if timeout is not None:
|
||||
timeout = float(timeout)
|
||||
|
||||
try:
|
||||
response = session.post(url, data=data, auth=(username, password), timeout=timeout)
|
||||
response.raise_for_status()
|
||||
except requests.RequestException:
|
||||
raise ServiceIsDown
|
||||
try:
|
||||
response_data = response.json()
|
||||
access_token = response_data['access_token']
|
||||
response_data = response.json()
|
||||
except (ValueError, KeyError, TypeError):
|
||||
raise ServiceIsDown
|
||||
return access_token
|
|
@ -17,8 +17,9 @@ KNOWN_ERRORS = {
|
|||
'Pas de droit sur la période demandée pour la prestation sélectionnée et le bénéficiaire choisi',
|
||||
'Pas de droit sur la période demandée pour la prestation sélectionnée.',
|
||||
"Votre quotient familial (Qf) sur cette période est non disponible. Pour plus d'information, contactez-nous.",
|
||||
# API particulier error message not from the source above
|
||||
# API particulier error messages not from the source above
|
||||
'Les paramètres fournis sont incorrects ou ne correspondent pas à un avis',
|
||||
"L'identifiant indiqué n'existe pas, n'est pas connu ou ne comporte aucune information pour cet appel.",
|
||||
},
|
||||
400: {
|
||||
'Absence de code confidentiel. Le document ne peut être édité.',
|
||||
|
@ -30,6 +31,8 @@ KNOWN_ERRORS = {
|
|||
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée',
|
||||
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée (après date du jour)',
|
||||
'L’opérateurs téléphonique» ne propose pas de raccordement SMS avec un prestataire externe (raccordement avec un numéro court). ',
|
||||
# API particulier error messages not from the source above
|
||||
"La référence de l'avis n'est pas correctement formatée",
|
||||
},
|
||||
500: {
|
||||
'Les informations souhaitées sont momentanément indisponibles. Merci de renouveler votre demande ultérieurement.',
|
||||
|
@ -39,7 +42,7 @@ KNOWN_ERRORS = {
|
|||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. Des paramètres manquent.",
|
||||
(
|
||||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. "
|
||||
"La taille du message ne doit pas être supérieure à 160 caractères."
|
||||
'La taille du message ne doit pas être supérieure à 160 caractères.'
|
||||
),
|
||||
(
|
||||
"Votre demande n'a pu aboutir en raison d'un incident technique lié à l'appel au service IMC. "
|
||||
|
@ -50,7 +53,7 @@ KNOWN_ERRORS = {
|
|||
"Votre demande n'a pu aboutir en raison d'une erreur technique lié à l'appel au service IMC.",
|
||||
(
|
||||
"Votre demande n’a pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. "
|
||||
"Merci de renouveler votre demande ultérieurement."
|
||||
'Merci de renouveler votre demande ultérieurement.'
|
||||
),
|
||||
},
|
||||
}
|
||||
|
|
|
@ -2,7 +2,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0002_auto_20151009_0326'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0002_auto_20181118_0807'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0003_auto_20190212_0426'),
|
||||
]
|
||||
|
|
|
@ -5,7 +5,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0004_auto_20190215_0807'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api_particulier', '0005_auto_20210610_1508'),
|
||||
]
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
# Generated by Django 3.2.18 on 2023-12-13 10:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('api_particulier', '0006_api_key_length_1024'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='apiparticulier',
|
||||
name='api_key',
|
||||
field=models.CharField(blank=True, default='', max_length=2048, verbose_name='API key'),
|
||||
),
|
||||
]
|
|
@ -63,7 +63,7 @@ class APIParticulier(BaseResource):
|
|||
choices=[(key, platform['label']) for key, platform in PLATFORMS.items()],
|
||||
)
|
||||
|
||||
api_key = models.CharField(max_length=1024, default='', blank=True, verbose_name=_('API key'))
|
||||
api_key = models.CharField(max_length=2048, default='', blank=True, verbose_name=_('API key'))
|
||||
|
||||
log_requests_errors = False
|
||||
|
||||
|
@ -170,7 +170,6 @@ class APIParticulier(BaseResource):
|
|||
self.save()
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
description=_('Get scopes available'),
|
||||
display_order=1,
|
||||
)
|
||||
|
@ -184,7 +183,6 @@ class APIParticulier(BaseResource):
|
|||
}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
show=False,
|
||||
description=_('Get citizen\'s fiscal informations'),
|
||||
parameters={
|
||||
|
@ -208,7 +206,6 @@ class APIParticulier(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='avis-imposition',
|
||||
perm='can_access',
|
||||
description=_('Get citizen\'s fiscal informations'),
|
||||
parameters={
|
||||
'numero_fiscal': {
|
||||
|
@ -303,7 +300,6 @@ class APIParticulier(BaseResource):
|
|||
return data
|
||||
|
||||
@endpoint(
|
||||
perm='can_access',
|
||||
show=False,
|
||||
description=_('Get family allowances recipient informations'),
|
||||
parameters={
|
||||
|
@ -327,7 +323,6 @@ class APIParticulier(BaseResource):
|
|||
|
||||
@endpoint(
|
||||
name='situation-familiale',
|
||||
perm='can_access',
|
||||
description=_('Get family allowances recipient informations'),
|
||||
parameters={
|
||||
'code_postal': {
|
||||
|
@ -363,6 +358,11 @@ class APIParticulier(BaseResource):
|
|||
)
|
||||
data['data']['numero_allocataire'] = numero_allocataire
|
||||
data['data']['code_postal'] = code_postal
|
||||
for kind in 'allocataires', 'enfants':
|
||||
for person in data['data'].get(kind) or []:
|
||||
if len(person.get('dateDeNaissance') or '') == 8:
|
||||
birthdate = person['dateDeNaissance']
|
||||
person['dateDeNaissance_iso'] = birthdate[4:] + '-' + birthdate[2:4] + '-' + birthdate[:2]
|
||||
return data
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
|
|
@ -2,7 +2,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0002_auto_20151009_0326'),
|
||||
]
|
||||
|
|
|
@ -2,7 +2,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0001_initial'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0002_auto_20170920_0951'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0003_auto_20181102_1550'),
|
||||
]
|
||||
|
|
|
@ -8,7 +8,6 @@ import passerelle.utils.templates
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0004_remove_arcgis_log_level'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0005_auto_20200310_1517'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arcgis', '0006_auto_20200401_1025'),
|
||||
]
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import string
|
||||
from urllib import parse as urlparse
|
||||
|
||||
|
@ -32,6 +33,42 @@ from passerelle.utils.conversion import num2deg
|
|||
from passerelle.utils.jsonresponse import APIError
|
||||
from passerelle.utils.templates import render_to_string, validate_template
|
||||
|
||||
EDIT_ITEM_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'Item schema',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'geometry': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'x': {'type': 'string'},
|
||||
'y': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
'attributes': {'type': 'object'},
|
||||
},
|
||||
'required': ['attributes'],
|
||||
}
|
||||
|
||||
EDIT_SCHEMA = {
|
||||
'$schema': 'http://json-schema.org/draft-04/schema#',
|
||||
'title': 'Edit payload',
|
||||
'description': '',
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'adds': {
|
||||
'type': 'array',
|
||||
'description': 'Adds object',
|
||||
'items': EDIT_ITEM_SCHEMA,
|
||||
},
|
||||
'updates': {'type': 'array', 'description': 'Updates object', 'items': EDIT_ITEM_SCHEMA},
|
||||
'deletes': {'type': 'array', 'description': 'Deletes object', 'items': {'type': 'string'}},
|
||||
},
|
||||
'minProperties': 1,
|
||||
'unflatten': True,
|
||||
}
|
||||
|
||||
|
||||
class ArcGISError(APIError):
|
||||
pass
|
||||
|
@ -177,7 +214,6 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='mapservice-query',
|
||||
description=_('Map Service Query'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
|
@ -247,7 +283,6 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
@endpoint(
|
||||
name='featureservice-query',
|
||||
description=_('Feature Service Query'),
|
||||
perm='can_access',
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
|
@ -318,9 +353,49 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
text_fieldname=text_fieldname,
|
||||
)
|
||||
|
||||
@endpoint(
|
||||
name='featureservice-applyedits',
|
||||
description=_('Feature Service Apply Edits'),
|
||||
parameters={
|
||||
'folder': {
|
||||
'description': _('Folder name'),
|
||||
'example_value': 'Specialty',
|
||||
},
|
||||
'service': {
|
||||
'description': _('Service name'),
|
||||
'example_value': 'ESRI_StateCityHighway_USA',
|
||||
},
|
||||
'layer': {
|
||||
'description': _('Layer or table name'),
|
||||
'example_value': '1',
|
||||
},
|
||||
},
|
||||
post={'request_body': {'schema': {'application/json': EDIT_SCHEMA}}},
|
||||
)
|
||||
def featureservice_applyedits(
|
||||
self,
|
||||
request,
|
||||
post_data,
|
||||
service,
|
||||
layer='0',
|
||||
folder='',
|
||||
):
|
||||
# implement "apply edits" feature service
|
||||
# https://developers.arcgis.com/rest/services-reference/enterprise/apply-edits-feature-service-layer-.htm
|
||||
uri = 'services/'
|
||||
if folder:
|
||||
uri += folder + '/'
|
||||
uri = uri + service + '/FeatureServer/' + layer + '/applyEdits'
|
||||
params = {'f': 'pjson'}
|
||||
for key, value in post_data.items():
|
||||
post_data[key] = json.dumps(value)
|
||||
params.update(post_data)
|
||||
return {'data': self.request(urlparse.urljoin(self.base_url, uri), data=params)}
|
||||
|
||||
@endpoint(
|
||||
name='tile',
|
||||
description=_('Tiles layer'),
|
||||
perm='OPEN',
|
||||
pattern=r'^(?P<layer>[\w/]+)/(?P<zoom>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+)\.png$',
|
||||
)
|
||||
def tile(self, request, layer, zoom, tile_x, tile_y):
|
||||
|
@ -349,7 +424,6 @@ class ArcGIS(BaseResource, HTTPResource):
|
|||
name='q',
|
||||
description=_('Query'),
|
||||
pattern=r'^(?P<query_slug>[\w:_-]+)/$',
|
||||
perm='can_access',
|
||||
show=False,
|
||||
)
|
||||
def q(self, request, query_slug, q=None, full=False, **kwargs):
|
||||
|
|
|
@ -2,7 +2,6 @@ from django.db import migrations, models
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('base', '0006_resourcestatus'),
|
||||
]
|
||||
|
|
|
@ -4,7 +4,6 @@ from django.db import migrations
|
|||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('arpege_ecp', '0001_initial'),
|
||||
]
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue