Compare commits


26 Commits

Author SHA1 Message Date
Nicolas Roche 5f6a4407bb en cours
gitea/passerelle/pipeline/head There was a failure building this commit
2023-06-14 18:33:42 +02:00
Nicolas Roche 02644bb98e toulouse-maelis: [functests] flagCom correction (#77634) 2023-06-14 16:03:58 +02:00
Nicolas Roche 875c5c8823 toulouse-maelis: [functest] include ban results into test (#77634) 2023-06-14 16:03:58 +02:00
Nicolas Roche ce381d6ff5 ... to remove, upload document ko 2023-06-14 16:03:58 +02:00
Nicolas Roche c82ddde1e0 to remove (plouf) 2023-06-14 16:03:58 +02:00
Nicolas Roche 7d50e061c6 toulouse-maelis: [functests] update invoice test on extrasco (#77634) 2023-06-14 16:03:58 +02:00
Nicolas Roche 7def7199d7 toulouse-maelis: [functests] update loisirs basket tests (#77634) 2023-06-14 16:03:58 +02:00
Nicolas Roche 4d58282997 toulouse-maelis: [functests] add subscriptions out from Toulouse (#77634) 2023-06-14 16:03:58 +02:00
Nicolas Roche 77bfc82b48 toulouse-maelis: [functests] re-enabling tests on extra-sco (#77634) 2023-06-14 16:03:58 +02:00
Nicolas Roche db0f21ae87 toulouse-maelis: [functests] re-enabling tests on loisirs (#77634) 2023-06-14 16:03:58 +02:00
Nicolas Roche c3f73b74f8 toulouse-maelis: [functests] add test for adultes on perisco (#77634) 2023-06-14 16:03:58 +02:00
Nicolas Roche 50da0e4834 toulouse-maelis: [functests] re-enable test on pericso (#77634) 2023-06-14 16:03:57 +02:00
Nicolas Roche c4afb25064 toulouse-maelis: [functests] improve tests on scolaire (#77634) 2023-06-14 16:03:57 +02:00
Nicolas Roche cffe244f4f toulouse-maelis: [functests] re-enabling tests on ape (#77634) 2023-06-14 16:03:57 +02:00
Nicolas Roche c087eb5806 toulouse-maelis: [functests] locate test family into Toulouse (#77634) 2023-06-14 16:03:57 +02:00
Nicolas Roche 1ebccc1278 toulouse-maelis: [functests] update referentials (#77634) 2023-06-14 16:03:57 +02:00
Nicolas Roche f01a4dd6f7 toulouse-maelis: [functests] rename test families (#77634) 2023-06-14 16:03:57 +02:00
Nicolas Roche b811aad05f toulouse-maelis: [functests] complete tests on school subscription (#77634) 2023-06-14 16:03:57 +02:00
Nicolas Roche 2d59d96896 toulouse-maelis: [functests] add visa date to supplied documents (#77634) 2023-06-14 16:03:57 +02:00
Nicolas Roche a8d672eea7 toulouse-maelis: set quantity on booking (#78205) 2023-06-14 16:03:57 +02:00
Nicolas Roche c8fb63fe3e toulouse-maelis: do not crash on null recurrent_week (#78517)
gitea/passerelle/pipeline/head There was a failure building this commit
2023-06-14 15:35:31 +02:00
Thomas NOËL 7a21a3e50c sne: use Debian version of cryptography (#78475)
gitea/passerelle/pipeline/head This commit looks good
2023-06-13 19:29:58 +02:00
Emmanuel Cazenave d479819f50 esup_signature: switch eppn and create_by_eppn parameters (#78405)
gitea/passerelle/pipeline/head This commit looks good
2023-06-12 22:37:05 +02:00
Emmanuel Cazenave 415e9f8a9e greco: send motifsrejet as None (#78264)
gitea/passerelle/pipeline/head There was a failure building this commit
2023-06-12 15:21:01 +02:00
Serghei Mihai 166d58591a grenoble_gru: cleanup phone numbers (#78187)
gitea/passerelle/pipeline/head This commit looks good
2023-06-12 09:44:24 +02:00
Nicolas Roche 2cac256517 photon: validate geojson content received (#68414)
gitea/passerelle/pipeline/head This commit looks good
2023-06-09 09:36:25 +02:00
20 changed files with 163 additions and 25 deletions

View File

@@ -2,7 +2,7 @@
"number": "N/A",
"category": "BI",
"situation": "MARI",
-"flagCom": false,
+"flagCom": true,
"nbChild": 3,
"nbTotalChild": 4,
"nbAES": "1",
@@ -72,7 +72,8 @@
"countryCode": null,
"cdDepartment": "19",
"communeCode_text": "BRIVE-LA-GAILLARDE",
-"cdDepartment_text": "CORREZE"
+"cdDepartment_text": "CORREZE",
+"zipCode": "19100"
},
"dietcode": "MENU_AV",
"bPhoto": true,

View File

@@ -70,7 +70,8 @@
"countryCode": null,
"cdDepartment": "19",
"communeCode_text": "BRIVE-LA-GAILLARDE",
-"cdDepartment_text": "CORREZE"
+"cdDepartment_text": "CORREZE",
+"zipCode": "19100"
},
"adresse": {
"idStreet": "2317",
@@ -145,7 +146,8 @@
"countryCode": null,
"cdDepartment": "19",
"communeCode_text": "BRIVE-LA-GAILLARDE",
-"cdDepartment_text": "CORREZE"
+"cdDepartment_text": "CORREZE",
+"zipCode": "19100"
},
"dietcode": "MENU_AV",
"bPhoto": true,

View File

@@ -12,7 +12,8 @@
"countryCode": null,
"cdDepartment": "19",
"communeCode_text": "BRIVE-LA-GAILLARDE",
-"cdDepartment_text": "CORREZE"
+"cdDepartment_text": "CORREZE",
+"zipCode": "19100"
},
"adresse": {
"idStreet": null,

View File

@@ -2,7 +2,7 @@
"number": "N/A",
"category": "BI",
"situation": "MARI",
-"flagCom": false,
+"flagCom": true,
"nbChild": 3,
"nbTotalChild": 4,
"nbAES": "1",
@@ -61,7 +61,8 @@
"countryCode": null,
"cdDepartment": "19",
"communeCode_text": "BRIVE-LA-GAILLARDE",
-"cdDepartment_text": "CORREZE"
+"cdDepartment_text": "CORREZE",
+"zipCode": "19100"
},
"adresse": {
"idStreet": null,
@@ -135,7 +136,8 @@
"countryCode": null,
"cdDepartment": "19",
"communeCode_text": "BRIVE-LA-GAILLARDE",
-"cdDepartment_text": "CORREZE"
+"cdDepartment_text": "CORREZE",
+"zipCode": "19100"
},
"dietcode": "MENU_AV",
"bPhoto": true,

View File

@@ -3,8 +3,13 @@ import datetime
import pytest
import requests
from .conftest import link, unlink
def test_create_nursery_demand_on_existing_child(conn, create_data):
unlink(conn, create_data['name_id'])
link(conn, create_data)
url = conn + '/get-nursery-geojson'
resp = requests.get(url)
resp.raise_for_status()
@@ -57,6 +62,9 @@ def test_create_nursery_demand_on_existing_child(conn, create_data):
def test_create_nursery_demand_adding_new_child(conn, create_data):
unlink(conn, create_data['name_id'])
link(conn, create_data)
url = conn + '/get-nursery-geojson'
resp = requests.get(url)
resp.raise_for_status()
@@ -70,6 +78,7 @@ def test_create_nursery_demand_adding_new_child(conn, create_data):
res = resp.json()
assert res['err'] == 0
nb_childs = len(res['data']['childList'])
assert 'NELSON' not in [x['firstname'] for x in res['data']['childList']]
url = conn + '/create-nursery-demand'
payload = {
@@ -102,4 +111,5 @@ def test_create_nursery_demand_adding_new_child(conn, create_data):
res = resp.json()
assert res['err'] == 0
assert len(res['data']['childList']) == nb_childs + 1
assert 'NELSON' in [x['firstname'] for x in res['data']['childList']]
assert res['data']['childList'][nb_childs]['num'] == child_id

View File

@@ -205,11 +205,10 @@ def test_perisco_recurrent_week(conn, create_data, perisco_subscribe_info, refer
resp = requests.get(url, params=params)
resp.raise_for_status()
assert resp.json()['err'] == 0
-assert len(resp.json()['data']) == 2
+assert len(resp.json()['data']) == 1
assert resp.json()['data'][0]['id'] == perisco_subscribe_info['activity']['id']
assert [(x['text'], x['libelle'], x['libelle2']) for x in resp.json()['data']] == [
('Temps du midi', 'TEST TEMPS DU MIDI 22/23', 'Temps du midi'),
-('Temps du midi', 'TEST TEMPS DU MIDI 22/23', 'Temps du midi'),
]
# get recurent-week gabarit

View File

@@ -13,6 +13,7 @@ def test_catalog_personnalise_extrasco(extrasco_subscribe_info):
'quantity': None,
'unitPrice': 11.5,
}
assert item['activity']['activityPortail']['blocNoteList'] == []
def test_catalog_personnalise_extrasco2(extrasco_subscribe_info2):
@@ -26,6 +27,7 @@ def test_catalog_personnalise_extrasco2(extrasco_subscribe_info2):
'quantity': None,
'unitPrice': 11.5,
}
assert item['activity']['activityPortail']['blocNoteList'] == []
def test_direct_subscribe(conn, create_data, extrasco_subscribe_info, reference_year):

View File

@@ -265,6 +265,9 @@ def test_basket_subscribe_extrasco2(conn, create_data, extrasco_subscribe_info2,
assert data['controlResult'] == {'controlOK': True, 'message': None}
assert data['basket']['codeRegie'] == 105
assert len({x['personneInfo']['numPerson'] for x in data['basket']['lignes']}) == 1
import pdb
pdb.set_trace()
assert len({x['idIns'] for x in data['basket']['lignes']}) == 1 # 3 expected
assert len(subscriptions(create_data['bart_num'])) == 2 # redoundant

View File

@@ -35,7 +35,7 @@ SIGN_REQUEST_SCHEMA = {
'title': '',
'description': '',
'type': 'object',
-'required': ['file', 'recipients_emails', 'create_by_eppn'],
+'required': ['file', 'recipients_emails', 'eppn'],
'unflatten': True,
'properties': collections.OrderedDict(
{
@ -98,7 +98,7 @@ SIGN_REQUEST_SCHEMA = {
'enum': ['visa', 'pdfImageStamp', 'certSign', 'nexuSign'],
'default': 'pdfImageStamp',
},
-'create_by_eppn': {'type': 'string', 'description': 'EPPN of the sign request owner'},
+'eppn': {'type': 'string', 'description': 'EPPN of the sign request owner'},
'title': {'type': 'string', 'description': 'Title'},
'target_url': {
'type': 'string',
@@ -114,7 +114,7 @@ SIGN_REQUEST_WITH_WORKFLOW_SCHEMA = {
'title': '',
'description': '',
'type': 'object',
-'required': ['file', 'eppn', 'workflow_id'],
+'required': ['file', 'create_by_eppn', 'workflow_id'],
'unflatten': True,
'properties': collections.OrderedDict(
{
@ -151,7 +151,7 @@ SIGN_REQUEST_WITH_WORKFLOW_SCHEMA = {
'description': 'Steps numbers were every recipient has to sign',
'items': {'type': 'string'},
},
-'eppn': {'type': 'string', 'description': 'EPPN of the sign request owner'},
+'create_by_eppn': {'type': 'string', 'description': 'EPPN of the sign request owner'},
'workflow_id': {'type': 'string', 'description': 'Identifier of the workflow'},
'title': {'type': 'string', 'description': 'Title'},
'target_urls': {
@@ -255,7 +255,7 @@ class EsupSignature(BaseResource, HTTPResource):
'force_all_sign': 'false',
'comment': 'a comment',
'sign_type': 'pdfImageStamp',
-'create_by_eppn': 'aa@foo.com',
+'eppn': 'aa@foo.com',
'title': 'a title',
'target_url': 'smb://foo.bar/location-1/',
},
@ -279,7 +279,7 @@ class EsupSignature(BaseResource, HTTPResource):
'recipientsCCEmails': clean_list(post_data.get('recipients_cc_emails', [])),
'comment': post_data.get('comment', ''),
'signType': post_data.get('sign_type', 'pdfImageStamp'),
-'createByEppn': post_data['create_by_eppn'],
+'eppn': post_data['eppn'],
'title': post_data.get('title', ''),
'targetUrl': post_data.get('target_url', ''),
}
@ -314,7 +314,7 @@ class EsupSignature(BaseResource, HTTPResource):
'content': 'JVBERi0xL...(base64 PDF)...',
},
'workflow_id': '99',
-'eppn': 'aa@foo.com',
+'create_by_eppn': 'aa@foo.com',
'title': 'a title',
'recipients_emails/0': '0*xx@foo.com',
'recipients_emails/1': '0*yy@foo.com',
@ -345,7 +345,7 @@ class EsupSignature(BaseResource, HTTPResource):
}
params = {
-'createByEppn': post_data['eppn'],
+'createByEppn': post_data['create_by_eppn'],
'title': post_data.get('title', ''),
'recipientsEmails': clean_list(post_data.get('recipients_emails', [])),
'allSignToCompletes': clean_list(post_data.get('all_sign_to_completes', [])),
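
Summary of the schema edits above: the two endpoints swap their owner field. A minimal illustration of the new payload shapes, using the placeholder values already present in this diff (not code from the patch):

# After this change the plain sign-request endpoint (exercised by test_new
# further down) expects 'eppn', while the workflow endpoint
# (test_new_with_workflow) expects 'create_by_eppn'; previously it was the
# other way around.
new_sign_request_payload = {
    'recipients_emails/0': 'foo@invalid',
    'eppn': 'baz@invalid',  # was 'create_by_eppn'
}
new_with_workflow_payload = {
    'workflow_id': '99',
    'create_by_eppn': 'aa@foo.com',  # was 'eppn'
}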

View File

@@ -23,12 +23,47 @@ from django.db.models import JSONField
from django.utils.encoding import force_bytes
from django.utils.http import urlencode
from django.utils.translation import gettext_lazy as _
from jsonschema import ValidationError, validate, validators
from requests import RequestException
from passerelle.base.models import BaseResource
from passerelle.utils.api import endpoint
from passerelle.utils.jsonresponse import APIError
GEOJSON_SCHEMA = {
'type': 'object',
'properties': {
'features': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'properties': {
'type': 'object',
},
'geometry': {
'type': 'object',
'properties': {
'type': {
'type': 'string',
},
'coordinates': {
'type': 'array',
'minItems': 2,
'items': {
'oneOf': [{'type': 'number'}, {'type': 'string'}],
},
},
},
'required': ['type', 'coordinates'],
},
},
'required': ['properties', 'geometry'],
},
},
},
}
class AddressCacheModel(models.Model):
api_id = models.CharField(max_length=32, unique=True)
@@ -100,6 +135,18 @@ class Photon(BaseResource):
result['id'] = hashlib.md5(force_bytes(dict_dump)).hexdigest()
return result
def validate_geojson(self, response_json):
validator = validators.validator_for(GEOJSON_SCHEMA)
validator.META_SCHEMA['properties'].pop('description', None)
validator.META_SCHEMA['properties'].pop('title', None)
try:
validate(response_json, GEOJSON_SCHEMA)
except ValidationError as e:
error_msg = e.message
if e.path:
error_msg = '%s: %s' % ('/'.join(map(str, e.path)), error_msg)
raise APIError(error_msg)
@endpoint(
pattern='(?P<q>.+)?$',
description=_('Addresses list'),
@ -154,6 +201,7 @@ class Photon(BaseResource):
response_json = result_response.json()
except ValueError:
raise APIError('invalid photon response (%r)' % result_response.content[:1024])
self.validate_geojson(response_json)
result = []
for feature in response_json.get('features'):
@ -219,6 +267,7 @@ class Photon(BaseResource):
response_json = result_response.json()
except ValueError:
raise APIError('invalid photon response (%r)' % result_response.content[:1024])
self.validate_geojson(response_json)
result = None
for feature in response_json.get('features'):
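
A standalone sketch of the check performed by the new validate_geojson helper, using a trimmed copy of GEOJSON_SCHEMA and one of the malformed payloads from the new tests further down (illustrative code, not part of the patch):

from jsonschema import ValidationError, validate

# Trimmed schema: every feature must carry an object 'properties' and a
# 'geometry' with a 'type' and a 'coordinates' array of at least two items.
TRIMMED_SCHEMA = {
    'type': 'object',
    'properties': {
        'features': {
            'type': 'array',
            'items': {
                'type': 'object',
                'properties': {
                    'properties': {'type': 'object'},
                    'geometry': {
                        'type': 'object',
                        'properties': {
                            'type': {'type': 'string'},
                            'coordinates': {'type': 'array', 'minItems': 2},
                        },
                        'required': ['type', 'coordinates'],
                    },
                },
                'required': ['properties', 'geometry'],
            },
        },
    },
}

bad = {'features': [{'properties': {}, 'geometry': {'type': '', 'coordinates': [42]}}]}
try:
    validate(bad, TRIMMED_SCHEMA)
except ValidationError as e:
    # prints "features/0/geometry/coordinates: [42] is too short", the same
    # "path: message" format that validate_geojson wraps in an APIError
    print('%s: %s' % ('/'.join(map(str, e.path)), e.message))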

View File

@@ -44,8 +44,7 @@ class SNE(BaseResource, HTTPResource):
@property
def cert_public_bytes(self):
with self.client_certificate.open('rb') as f:
-certs = x509.load_pem_x509_certificates(f.read())
-cert = certs[0]
+cert = x509.load_pem_x509_certificate(f.read())
return cert.public_bytes(encoding=serialization.Encoding.PEM)
def check_status(self):

View File

@@ -405,6 +405,8 @@ Response :
data = sudsobject_to_dict(resp)
else:
data = self._rest_call('getMail', params=params)
if not data.get('motifsrejet'):
data['motifsrejet'] = None
return {'data': data}
@endpoint(
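
A minimal sketch of the normalisation added above: a missing or empty motifsrejet in the getMail response is now returned as None (the test change further down makes the mocked response return an empty string to cover this path). Illustrative data only:

data = {'iddemande': 'DWPQGCTS', 'idgreco': '538554', 'motifsrejet': ''}
# Same guard as in the diff: falsy values ('' or an absent key) become None.
if not data.get('motifsrejet'):
    data['motifsrejet'] = None
assert data['motifsrejet'] is None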

View File

@@ -107,6 +107,11 @@ class GrenobleGRU(BaseResource):
payload = {'id': data['application_id']}
payload['dem_comp'] = types_params['dem_comp'] = data.get('dem_comp', 'Voirie')
for key, value in data.items():
if key.endswith('_phone'):
phone = ''.join(x for x in value if x in '0123456789')
data[key] = phone
payload.update(
{
# applicant informations
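
A standalone sketch of the phone-number cleanup added above, applied to the value used by the new test case further down; clean_phone is an illustrative name, not a function from the patch:

def clean_phone(value):
    # Keep only digits, as the loop above now does for every *_phone field.
    return ''.join(x for x in value if x in '0123456789')

assert clean_phone('01-02.03 04 05') == '0102030405'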

View File

@@ -2426,7 +2426,7 @@ class ToulouseMaelis(BaseResource, HTTPResource):
'calendarLetter': None,
'isPresent': False,
}
-for item in post_data.get('recurrent_week'):
+for item in post_data.get('recurrent_week') or []:
day_num, key = item.split('-')
recurrent_week[day_num] = {
'dayNum': day_num,
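
Why the 'or []' matters: dict.get() returns None when recurrent_week is present but null, and iterating over None raises a TypeError. A minimal illustration (not code from the patch):

post_data = {'recurrent_week': None}

# Before the fix this raised: TypeError: 'NoneType' object is not iterable
#     for item in post_data.get('recurrent_week'):

# After the fix the loop body is simply skipped:
for item in post_data.get('recurrent_week') or []:
    raise AssertionError('never reached')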

View File

@@ -31,13 +31,13 @@ def test_new(app, connector):
},
'recipients_emails/0': 'foo@invalid',
'recipients_emails/1': 'bar@invalid',
-'create_by_eppn': 'baz@invalid',
+'eppn': 'baz@invalid',
'title': 'a title',
}
with responses.RequestsMock() as rsps:
query_params = {
'recipientsEmails': ['foo@invalid', 'bar@invalid'],
-'createByEppn': 'baz@invalid',
+'eppn': 'baz@invalid',
'title': 'a title',
'signType': 'pdfImageStamp',
'pending': True,
@@ -67,7 +67,7 @@ def test_new_with_workflow(app, connector):
'content_type': 'text/plain',
},
'workflow_id': '99',
-'eppn': 'aa@foo.com',
+'create_by_eppn': 'aa@foo.com',
'title': 'a title',
'recipients_emails/0': '0*xx@foo.com',
'recipients_emails/1': '0*yy@foo.com',

View File

@@ -521,7 +521,7 @@ def test_greco_rest_answer_ok(app, rest_conn):
json={
'iddemande': 'DWPQGCTS',
'idgreco': '538554',
-'motifsrejet': None,
+'motifsrejet': '',
'etat': None,
'message': {
'bcc': {'item': []},

View File

@@ -229,6 +229,7 @@ def test_demand_creation_params(app, setup):
payload = BASIC_PAYLOAD.copy()
payload['intervention_free_address'] = '169, rue du Château'
payload['applicant_free_address'] = '1, rue de l\'Est'
payload['applicant_phone'] = '01-02.03 04 05'
app.post_json(
reverse(
'generic-endpoint',
@ -243,6 +244,7 @@ def test_demand_creation_params(app, setup):
assert request_post.call_args[1]['data']['dem_pav'] == 'déchetterie'
assert request_post.call_args[1]['data']['dem_reponse'] == 0
assert request_post.call_args[1]['data']['obs_description_probleme'] == 'thank you '
assert request_post.call_args[1]['data']['dem_tel'] == '0102030405'
payload['applicant_requires_reply'] = True
payload['urgent_demand'] = 'True'

View File

@@ -333,3 +333,28 @@ def test_photon_non_json(mocked_get, app, photon):
resp = app.get('/photon/%s/reverse' % photon.slug, params={'lat': '0', 'lon': '0'}, status=200)
assert resp.json['err'] == 1
assert resp.json['err_desc'] == "invalid photon response (b'xxx')"
@pytest.mark.parametrize(
'endpoint', ['/photon/test/addresses', '/photon/test/search', '/photon/test/reverse']
)
@pytest.mark.parametrize(
'content',
[
'',
'{"features": ""}',
'{"features": null}',
'{"features": [null]}',
'{"features": [{}]}',
'{"features": [{"properties": null, "geometry": null}]}',
'{"features": [{"properties": {}, "geometry": {}}]}',
'{"features": [{"properties": {}, "geometry": {"type": ""}}]}',
'{"features": [{"properties": {}, "geometry": {"type": "", "coordinates": null}}]}',
'{"features": [{"properties": {}, "geometry": {"type": "", "coordinates": [42]}}]}',
],
)
@mock.patch('passerelle.utils.Request.get')
def test_photon_bad_geojson_response(mocked_get, content, endpoint, app, photon):
mocked_get.return_value = tests.utils.FakedResponse(content=content, status_code=200)
resp = app.get(endpoint, params={'q': 'plop', 'lat': 48, 'lon': 2})
assert resp.json['err'] == 1

View File

@@ -5777,6 +5777,38 @@ def test_update_recurrent_week_empty(family_service, activity_service, con, app)
assert resp.json['data'] == 'ok'
def test_update_recurrent_week_null(family_service, activity_service, con, app):
def request_check(request):
assert serialize_object(request.dayWeekInfoList) == [
{'dayNum': 1, 'isPresent': False, 'isOpen': None, 'calendarLetter': None},
{'dayNum': 2, 'isPresent': False, 'isOpen': None, 'calendarLetter': None},
{'dayNum': 3, 'isPresent': False, 'isOpen': None, 'calendarLetter': None},
{'dayNum': 4, 'isPresent': False, 'isOpen': None, 'calendarLetter': None},
{'dayNum': 5, 'isPresent': False, 'isOpen': None, 'calendarLetter': None},
{'dayNum': 6, 'isPresent': False, 'isOpen': None, 'calendarLetter': None},
{'dayNum': 7, 'isPresent': False, 'isOpen': None, 'calendarLetter': None},
]
family_service.add_soap_response('readFamily', get_xml_file('R_read_family.xml'))
activity_service.add_soap_response(
'updateWeekCalendar',
get_xml_file('R_update_week_calendar.xml'),
request_check=request_check,
)
url = get_endpoint('update-recurrent-week')
params = {
'person_id': '613880',
'activity_id': 'A10049327682',
'start_date': '2023-04-01',
'end_date': '2023-04-30',
'recurrent_week': None,
}
resp = app.post_json(url + '?family_id=311323', params=params)
assert resp.json['err'] == 0
assert resp.json['data'] == 'ok'
def test_update_recurrent_week_not_linked_error(con, app):
url = get_endpoint('update-recurrent-week')
params = {
@@ -6018,6 +6050,9 @@ def test_read_activity_list(activity_service, con, app):
resp = app.get(url, params=params)
assert resp.json['err'] == 0
assert len(resp.json['data']) == 8
import pdb
pdb.set_trace()
assert [
(
x['id'],

View File

@@ -47,6 +47,7 @@ deps =
ldaptools
Pillow<9.5.0
python-ldap<=3.2 # align with Debian <= 11 (buster, bullseye)
cryptography<39
git+https://git.entrouvert.org/publik-django-templatetags.git
commands =
./get_wcs.sh