Compare commits

..

10 Commits

Author SHA1 Message Date
Frédéric Péters 9a892a0e77 general: remove no longer necessary perm='can_access' (#78041)
gitea/passerelle/pipeline/head This commit looks good Details
2023-05-31 16:22:00 +02:00
Frédéric Péters e4a9d16719 general: be explicit about open endpoints (#78041) 2023-05-31 16:21:20 +02:00
Corentin Sechet 29b8775a16 translation update
gitea/passerelle/pipeline/head This commit looks good Details
2023-05-31 14:36:39 +02:00
Nicolas Roche d176d9fc4b toulouse-maelis: add service criteria to catalog (#77084)
gitea/passerelle/pipeline/head This commit looks good Details
2023-05-31 10:35:59 +02:00
Nicolas Roche 8df0c9ec11 toulouse-maelis: accept an empty string as recurrent week (#78009)
gitea/passerelle/pipeline/head This commit looks good Details
2023-05-30 20:43:18 +02:00
Nicolas Roche d21669a250 toulouse-maelis: use dedicated soap input on reccurent week tests (#78009) 2023-05-30 20:43:18 +02:00
Emmanuel Cazenave b74e848dbd esup_signature: add a field to define HTTP headers (#78003)
gitea/passerelle/pipeline/head This commit looks good Details
2023-05-30 16:31:10 +02:00
Emmanuel Cazenave 0c06086585 esup_signature: add parameters to the 'new' endpoint (#77670)
gitea/passerelle/pipeline/head This commit looks good Details
2023-05-30 13:39:30 +02:00
Emmanuel Cazenave 6b74e9a632 esup_signature: send standard parameters through the query string (#77670) 2023-05-30 13:29:50 +02:00
Emmanuel Cazenave 7102c3150a esup_signature: add new-with-workflow endpoint (#77670) 2023-05-30 13:29:23 +02:00
8 changed files with 1004 additions and 113 deletions

View File

@ -0,0 +1,21 @@
# Generated by Django 3.2.18 on 2023-05-30 14:07
from django.db import migrations, models


class Migration(migrations.Migration):
    # Adds the optional ``forced_headers`` free-text field to the
    # EsupSignature connector (#78003).  The help text mirrors the one
    # declared on the model field in apps/esup_signature/models.py
    # (frozen here untranslated, as Django migrations do).

    dependencies = [
        ('esup_signature', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='esupsignature',
            name='forced_headers',
            field=models.TextField(
                blank=True,
                help_text='Headers to always add (one per line, format "Header-Name: value")',
                verbose_name='Headers',
            ),
        ),
    ]

View File

@ -35,7 +35,7 @@ SIGN_REQUEST_SCHEMA = {
'title': '',
'description': '',
'type': 'object',
'required': ['file', 'recipients_emails', 'eppn'],
'required': ['file', 'recipients_emails', 'create_by_eppn'],
'unflatten': True,
'properties': collections.OrderedDict(
{
@ -62,27 +62,149 @@ SIGN_REQUEST_SCHEMA = {
'description': 'Recipients emails',
'items': {'type': 'string'},
},
'eppn': {'type': 'string', 'description': 'EPPN of the sign request owner'},
'recipients_cc_emails': {
'type': 'array',
'description': 'Recipients CC emails',
'items': {'type': 'string'},
},
'all_sign_to_complete': {
'type': 'string',
'description': 'Every recipient has to sign',
'enum': ['true', 'false'],
'default': 'false',
},
'user_sign_first': {
'type': 'string',
'description': 'the author must sign first',
'enum': ['true', 'false'],
'default': 'false',
},
'pending': {
'type': 'string',
'description': 'Pending',
'enum': ['true', 'false'],
'default': 'true',
},
'force_all_sign': {
'type': 'string',
'description': 'Force signing on every document',
'enum': ['true', 'false'],
'default': 'false',
},
'comment': {'type': 'string', 'description': 'Comment'},
'sign_type': {
'type': 'string',
'description': 'Signature type',
'enum': ['visa', 'pdfImageStamp', 'certSign', 'nexuSign'],
'default': 'pdfImageStamp',
},
'create_by_eppn': {'type': 'string', 'description': 'EPPN of the sign request owner'},
'title': {'type': 'string', 'description': 'Title'},
'target_url': {
'type': 'string',
'description': 'End location',
},
}
),
}
# JSON schema for the new-with-workflow endpoint payload: the document to
# sign (base64-encoded) plus the identifier of the esup-signature workflow
# it must be routed through.  'unflatten' lets callers post flattened keys
# such as "recipients_emails/0".
SIGN_REQUEST_WITH_WORKFLOW_SCHEMA = {
    '$schema': 'http://json-schema.org/draft-04/schema#',
    'title': '',
    'description': '',
    'type': 'object',
    'required': ['file', 'eppn', 'workflow_id'],
    'unflatten': True,
    'properties': collections.OrderedDict(
        {
            'file': {
                'type': 'object',
                'description': 'File object',
                'required': ['filename', 'content_type', 'content'],
                'properties': {
                    'filename': {
                        'type': 'string',
                    },
                    'content_type': {
                        'type': 'string',
                        'description': 'MIME content-type',
                    },
                    'content': {
                        'type': 'string',
                        'description': 'Content, base64 encoded',
                    },
                },
            },
            'recipients_emails': {
                'type': 'array',
                'description': 'Recipients emails at each step',
                'items': {'type': 'string'},
            },
            'target_emails': {
                'type': 'array',
                'description': 'Target emails',
                'items': {'type': 'string'},
            },
            'all_sign_to_completes': {
                'type': 'array',
                # typo fix: "were" -> "where"
                'description': 'Steps numbers where every recipient has to sign',
                'items': {'type': 'string'},
            },
            'eppn': {'type': 'string', 'description': 'EPPN of the sign request owner'},
            'workflow_id': {'type': 'string', 'description': 'Identifier of the workflow'},
            'title': {'type': 'string', 'description': 'Title'},
            'target_urls': {
                'type': 'array',
                'description': 'End locations',
                'items': {'type': 'string'},
            },
            'signrequest_params_jsonstring': {
                'type': 'string',
                'description': 'Signature parameters',
            },
        }
    ),
}
def clean_list(some_list):
    """Return a copy of *some_list* with falsy entries (e.g. '') removed."""
    return list(filter(None, some_list))
def to_bool(some_str):
    """Map the literal string 'true' to True; any other value to False."""
    if some_str == 'true':
        return True
    return False
class EsupSignature(BaseResource, HTTPResource):
base_url = models.URLField(_('API URL'))
forced_headers = models.TextField(
_('Headers'),
blank=True,
help_text=_('Headers to always add (one per line, format "Header-Name: value")'),
)
category = _('Business Process Connectors')
class Meta:
verbose_name = _('Esup Signature')
def _call(self, path, method='get', data=None, files=None, expect_json=True):
def _call(self, path, method='get', params=None, files=None, expect_json=True):
url = urllib.parse.urljoin(self.base_url, path)
kwargs = {}
headers = {}
for header in self.forced_headers.splitlines():
header = header.strip()
if header.startswith('#'):
continue
header = header.split(':', 1)
if len(header) == 2:
headers[header[0].strip()] = header[1].strip()
kwargs['headers'] = headers
if method == 'post':
kwargs['data'] = data
kwargs['params'] = params
kwargs['files'] = files
try:
@ -124,8 +246,18 @@ class EsupSignature(BaseResource, HTTPResource):
'recipients_emails/0': 'xx@foo.com',
'recipients_emails/1': 'yy@foo.com',
'recipients_emails/2': 'zz@foo.com',
'eppn': 'aa@foo.com',
'recipients_cc_emails/0': 'xx@foo.com',
'recipients_cc_emails/1': 'yy@foo.com',
'recipients_cc_emails/2': 'zz@foo.com',
'all_sign_to_complete': 'true',
'user_sign_first': 'false',
'pending': 'true',
'force_all_sign': 'false',
'comment': 'a comment',
'sign_type': 'pdfImageStamp',
'create_by_eppn': 'aa@foo.com',
'title': 'a title',
'target_url': 'smb://foo.bar/location-1/',
},
},
)
@ -142,16 +274,91 @@ class EsupSignature(BaseResource, HTTPResource):
)
}
recipients_emails = [email for email in post_data['recipients_emails'] if email]
data = {
'signType': 'pdfImageStamp',
'recipientsEmails': recipients_emails,
'eppn': post_data['eppn'],
params = {
'recipientsEmails': clean_list(post_data['recipients_emails']),
'recipientsCCEmails': clean_list(post_data.get('recipients_cc_emails', [])),
'comment': post_data.get('comment', ''),
'signType': post_data.get('sign_type', 'pdfImageStamp'),
'createByEppn': post_data['create_by_eppn'],
'title': post_data.get('title', ''),
'pending': True,
'targetUrl': post_data.get('target_url', ''),
}
return {'data': self._call('ws/signrequests/new', method='post', data=data, files=files)}
bool_params = {
'all_sign_to_complete': ('allSignToComplete', False),
'user_sign_first': ('userSignFirst', False),
'pending': ('pending', True),
'force_all_sign': ('forceAllSign', False),
}
for key, value in bool_params.items():
ext_param, default = value
params[ext_param] = default
if key in post_data:
params[ext_param] = to_bool(post_data[key])
return {'data': self._call('ws/signrequests/new', method='post', params=params, files=files)}
@endpoint(
name='new-with-workflow',
description=_('Create a sign request'),
post={
'request_body': {
'schema': {
'application/json': SIGN_REQUEST_WITH_WORKFLOW_SCHEMA,
}
},
'input_example': {
'file': {
'filename': 'example-1.pdf',
'content_type': 'application/pdf',
'content': 'JVBERi0xL...(base64 PDF)...',
},
'workflow_id': '99',
'eppn': 'aa@foo.com',
'title': 'a title',
'recipients_emails/0': '0*xx@foo.com',
'recipients_emails/1': '0*yy@foo.com',
'recipients_emails/2': '1*zz@foo.com',
'all_sign_to_completes/0': '12',
'all_sign_to_completes/1': '13',
'target_emails/0': 'xx@foo.com',
'target_emails/1': 'yy@foo.com',
'target_emails/2': 'zz@foo.com',
'signrequest_params_jsonstring': 'List [ OrderedMap { "xPos": 100, "yPos": 100, "signPageNumber": 1 }, '
'OrderedMap { "xPos": 200, "yPos": 200, "signPageNumber": 1 } ]',
'target_urls/0': 'smb://foo.bar/location-1/',
'target_urls/1': 'smb://foo.bar/location-2/',
},
},
)
def new_with_workflow(self, request, post_data):
    """Create a sign request routed through an esup-signature workflow.

    Decodes the base64 file content and posts it as a multipart file to
    /ws/workflows/<workflow_id>/new; the other fields are sent as request
    parameters (forwarded by _call).  Raises APIError when the file
    content is not valid base64.
    """
    try:
        file_bytes = io.BytesIO(base64.b64decode(post_data['file']['content']))
    except (TypeError, binascii.Error):
        raise APIError("Can't decode file")
    files = {
        # field name expected upstream for the uploaded document
        'multipartFiles': (
            post_data['file']['filename'],
            file_bytes,
            post_data['file']['content_type'],
        )
    }
    # Map this endpoint's snake_case inputs to the upstream camelCase
    # parameters; list inputs are stripped of empty entries first.
    params = {
        'createByEppn': post_data['eppn'],
        'title': post_data.get('title', ''),
        'recipientsEmails': clean_list(post_data.get('recipients_emails', [])),
        'allSignToCompletes': clean_list(post_data.get('all_sign_to_completes', [])),
        'targetEmails': clean_list(post_data.get('target_emails', [])),
        'signRequestParamsJsonString': post_data.get('signrequest_params_jsonstring', ''),
        'targetUrls': clean_list(post_data.get('target_urls', [])),
    }
    return {
        'data': self._call(
            '/ws/workflows/%s/new' % post_data['workflow_id'], method='post', params=params, files=files
        )
    }
@endpoint(
methods=['get'],

View File

@ -83,6 +83,23 @@ BOOKING_ACTIVITY_SCHEMA = {
],
}
# A recurrent week is either a list of "<day>-<letter>" codes (day number
# 1-7 followed by an uppercase calendar letter), or an explicitly empty
# value — null or empty string — accepted to clear the recurrence (#78009).
RECURRENT_WEEK_SCHEMA = {
    'oneOf': [
        {
            # list of day/calendar-letter codes, e.g. "3-B"
            'type': 'array',
            'items': {
                'type': 'string',
                'pattern': '^[1-7]-[A-Z]$',
            },
        },
        # no recurrence at all
        {'type': 'null'},
        {
            # empty string, treated like null
            'type': 'string',
            'pattern': '^$',
        },
    ],
}
UPDATE_RECURRENT_WEEK_SCHEMA = {
'type': 'object',
'properties': {
@ -102,13 +119,7 @@ UPDATE_RECURRENT_WEEK_SCHEMA = {
'type': 'string',
'pattern': '^([0-9]{4}-[0-9]{2}-[0-9]{2}){0,1}$',
},
'recurrent_week': {
'type': 'array',
'items': {
'type': 'string',
'pattern': '^[1-7]-[A-Z]$',
},
},
'recurrent_week': RECURRENT_WEEK_SCHEMA,
},
'required': [
'person_id',
@ -147,15 +158,7 @@ SUBSCRIPTION_SCHEMA = {
'type': 'string',
'pattern': '^[0-9]{4}-[0-9]{2}-[0-9]{2}$',
},
'recurrent_week': {
'oneOf': [
{
'type': 'array',
'items': {'type': 'string'},
},
{'type': 'null'},
],
},
'recurrent_week': RECURRENT_WEEK_SCHEMA,
'conveyanceSubscribe': {
'type': 'object',
'properties': {

View File

@ -352,12 +352,12 @@ class ToulouseMaelis(BaseResource, HTTPResource):
return [x.item_data for x in queryset]
def get_referential_value(self, referential_name, key):
def get_referential_value(self, referential_name, key, default='key'):
try:
return self.referential.get(referential_name=referential_name, item_id=key).item_text
except Referential.DoesNotExist:
self.logger.warning("No '%s' key into Maelis '%s' referential", key, referential_name)
return key
return key if default == 'key' else None
def get_link(self, NameID):
try:
@ -2658,6 +2658,7 @@ class ToulouseMaelis(BaseResource, HTTPResource):
def read_activity_list(self, request, ref_date=None):
reference_year = utils.get_reference_year_from_date(ref_date or now())
labels = {
'service': 'Service',
'nature': "Nature de l'activité",
'type': "Type de l'activité",
'public': 'Public',
@ -2687,6 +2688,8 @@ class ToulouseMaelis(BaseResource, HTTPResource):
activities = catalogs[0]['data'] if catalogs else []
def add_criteria(label_key, criteria_key, criteria_value):
    # Record a criteria value both on the current activity's criterias
    # and, if not already present, in the global all_criterias mapping
    # (both captured from the enclosing scope).  Empty values — e.g. a
    # referential lookup that returned None — are silently skipped.
    if not criteria_value:
        return
    criterias[label_key]['data'][criteria_key] = criteria_value
    if criteria_key not in all_criterias[label_key]['data']:
        all_criterias[label_key]['data'][criteria_key] = criteria_value
@ -2713,19 +2716,23 @@ class ToulouseMaelis(BaseResource, HTTPResource):
activity['text'] = (
activity['activityPortail']['libelle2'] or activity['activityPortail']['libelle']
)
service_id = activity['activityPortail']['idService']
service_text = self.get_referential_value('Service', service_id, default=None)
activity['activityPortail']['idService_text'] = service_text
for label_key in criterias:
criterias[label_key]['data'] = {}
add_criteria('nature', activity_nature['code'], activity_nature['libelle'])
type_value = activity_type['libelle'].split('-')[0].strip()
add_criteria('type', slugify(type_value), type_value)
add_criteria('service', service_id, service_text)
if activity['activityPortail']['weeklyCalendarActivityList']:
for day in activity['activityPortail']['weeklyCalendarActivityList'][0]['dayWeekInfoList']:
if day['isOpen']:
add_criteria('day', str(day['dayNum']), day_names[day['dayNum'] - 1])
update_criterias_order_field(criterias, ['nature', 'type', 'day'])
update_criterias_order_field(criterias, ['service', 'nature', 'type', 'day'])
for unit in activity.pop('unitPortailList'):
unit['id'] = unit['idUnit']
@ -2763,7 +2770,7 @@ class ToulouseMaelis(BaseResource, HTTPResource):
'meta': {
'reference_year': reference_year,
'all_criterias': all_criterias,
'all_criterias_order': ['nature', 'type', 'public', 'day', 'place'],
'all_criterias_order': ['service', 'nature', 'type', 'public', 'day', 'place'],
},
}

View File

@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: Passerelle 0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-05-16 11:03+0200\n"
"POT-Creation-Date: 2023-05-31 14:01+0200\n"
"PO-Revision-Date: 2023-05-03 11:19+0200\n"
"Last-Translator: Frederic Peters <fpeters@entrouvert.com>\n"
"Language: fr\n"
@ -688,6 +688,7 @@ msgstr "Nom de fichier"
#: apps/astre_rest/models.py apps/cmis/models.py apps/filr_rest/schemas.py
#: apps/franceconnect_data/models.py apps/jsondatastore/models.py
#: contrib/toulouse_foederis/models.py
msgid "Content"
msgstr "Contenu"
@ -2223,6 +2224,16 @@ msgstr "Récupérer un rendez-vous :"
msgid "Delete appointment"
msgstr "Supprimer un rendez-vous"
#: apps/esup_signature/models.py apps/proxy/models.py
msgid "Headers"
msgstr "Entêtes HTTP"
#: apps/esup_signature/models.py apps/proxy/models.py
msgid "Headers to always add (one per line, format \"Header-Name: value\")"
msgstr ""
"Entêtes HTTP à ajouter à toute requête (une par ligne, au format \"Header-"
"Name: value\")"
#: apps/esup_signature/models.py
msgid "Esup Signature"
msgstr "Esup Signature"
@ -3953,16 +3964,6 @@ msgstr "URL de base du système cible"
msgid "Timeout on upstream (in seconds)"
msgstr "Délai d’attente pour les requêtes vers la cible (timeout, en secondes)"
#: apps/proxy/models.py
msgid "Headers"
msgstr "Entêtes HTTP"
#: apps/proxy/models.py
msgid "Headers to always add (one per line, format \"Header-Name: value\")"
msgstr ""
"Entêtes HTTP à ajouter à toute requête (une par ligne, au format \"Header-"
"Name: value\")"
#: apps/proxy/models.py
msgid "Proxy"
msgstr "Proxy"
@ -5013,7 +5014,7 @@ msgstr "Obtenir une facture au format PDF"
msgid "Notify an invoice as paid"
msgstr "Notifier le paiement de la facture"
#: contrib/caluire_axel/schemas.py
#: contrib/caluire_axel/schemas.py contrib/toulouse_foederis/models.py
msgid ""
"Values \"0\", \"1\", \"O\", \"N\", \"true\" or \"false\" are allowed (case "
"insensitive)."
@ -6105,50 +6106,6 @@ msgstr "Import dune demande"
msgid "Referential"
msgstr "Référentiels"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Communes:"
msgstr "Communes :"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Lieux:"
msgstr "Lieux :"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Homonymes:"
msgstr "Homonymes :"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Caisses de retraite"
msgstr "Caisses de retraite"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Organimes de tutelle"
msgstr "Organismes de tutelle"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Etablissement"
msgstr "Établissement"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Suivi"
msgstr "Suivi"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Visite"
msgstr "Visite"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Plan d aide "
msgstr "Plan d’aide"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Presentation Commission"
msgstr "Commission de présentation"
#: contrib/solis_apa/templates/passerelle/contrib/solis_apa/detail.html
msgid "Decision Commission"
msgstr "Commission de décision"
#: contrib/solis_apa/views.py
msgid "Unknown suivi type"
msgstr "Type de suivi inconnu"
@ -6433,6 +6390,206 @@ msgstr "Réservation CLAE/Cantine"
msgid "CLAE/Cantine annual booking"
msgstr "Réservation annuelle CLAE/Cantine"
#: contrib/toulouse_foederis/models.py
msgid "Attachment data."
msgstr "Données de la pièce jointe."
#: contrib/toulouse_foederis/models.py
msgid "ID of the application to which to attach the file."
msgstr "Identifiant de la candidature à laquelle attacher le fichier."
#: contrib/toulouse_foederis/models.py
msgid "Name of the attachment."
msgstr "Nom de la pièce jointe."
#: contrib/toulouse_foederis/models.py
msgid "File to attach."
msgstr "Fichier à attacher."
#: contrib/toulouse_foederis/models.py
msgid "File name"
msgstr "Nom du fichier"
#: contrib/toulouse_foederis/models.py
msgid "MIME type"
msgstr "type MIME du contenu"
#: contrib/toulouse_foederis/models.py
msgid "Application Type (External or Internal)."
msgstr "Type de candidature (Externe ou Interne)."
#: contrib/toulouse_foederis/models.py
msgid "ID of the concerned job offer."
msgstr "Identifiant de la candidature concernée."
#: contrib/toulouse_foederis/models.py
msgid "ID of an element of the data source 'civilite'."
msgstr "Identifiant d'un élément de la source de données 'civilite'."
#: contrib/toulouse_foederis/models.py
msgid "Applicant first name."
msgstr "Prénom du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant last name."
msgstr "Nom du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant gender."
msgstr "Genre du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant birth date."
msgstr "Date de naissance du candidat."
#: contrib/toulouse_foederis/models.py
msgid "ID of an element of the data source 'nationalite'."
msgstr "Identifiant d'un élément de la source de données 'nationalite'."
#: contrib/toulouse_foederis/models.py
msgid "Applicant end of working authorization, if nationality is 'other'."
msgstr "Date de fin d'autorisation de travail du candidat, si sa nationalité est 'autre'."
#: contrib/toulouse_foederis/models.py
msgid "RQTH."
msgstr "RQTH."
#: contrib/toulouse_foederis/models.py
msgid "End of RQTH, or none if not applicable."
msgstr "Fin de RQTH, ou None si non applicable."
#: contrib/toulouse_foederis/models.py
msgid "Driving license."
msgstr "Permis de conduire."
#: contrib/toulouse_foederis/models.py
msgid "FIMO licence."
msgstr "FIMO."
#: contrib/toulouse_foederis/models.py
msgid "FIMO licence delivrance date."
msgstr "Date de délivrance de la FIMO."
#: contrib/toulouse_foederis/models.py
msgid "FIMO licence end validity date."
msgstr "Date de fin de validité de la FIMO."
#: contrib/toulouse_foederis/models.py
msgid "ID of an element of the data source 'situation-actuelle'."
msgstr "Identifiant d'un élément de la source de données 'situation-actuelle'."
#: contrib/toulouse_foederis/models.py
msgid "Agent's collectivity"
msgstr "Collectivité de l'agent"
#: contrib/toulouse_foederis/models.py
msgid "Applicant availability start date."
msgstr "Date de début de disponibilité du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant availability end date."
msgstr "Date de fin de disponibilité du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant salary expectations."
msgstr "Prétentions salariales du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant address."
msgstr "Adresse du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant address complement."
msgstr "Complément d’adresse."
#: contrib/toulouse_foederis/models.py
msgid "Applicant zip code."
msgstr "Code postal."
#: contrib/toulouse_foederis/models.py
msgid "Applicant city."
msgstr "Ville du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant phone number."
msgstr "Numéro de téléphone du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant email."
msgstr "Courriel du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant contract start date."
msgstr "Date de début de contrat du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Applicant contract end date."
msgstr "Date de fin de contrat du candidat."
#: contrib/toulouse_foederis/models.py
msgid "Application information complement."
msgstr "Informations complémentaires sur la candidature."
#: contrib/toulouse_foederis/models.py
msgid "ID of an element of the data source 'origine-candidature'."
msgstr "Identifiant d'un élément de la source de données 'origine-candidature'."
#: contrib/toulouse_foederis/models.py
msgid "Precisions if 'origine' is 'other'."
msgstr "Précisions si 'origine' est 'autre'."
#: contrib/toulouse_foederis/models.py
msgid "RGPD agreement."
msgstr "Accord RGPD."
#: contrib/toulouse_foederis/models.py
msgid "IDs of elements of the data source 'type-emploi'."
msgstr "Identifiants d'éléments de la source de données 'type-emploi'."
#: contrib/toulouse_foederis/models.py
msgid "IDs of elements of the data source 'domaine-emploi'."
msgstr "Identifiants d'éléments de la source de données 'domaine-emploi'."
#: contrib/toulouse_foederis/models.py
msgid "IDs of elements of the data source 'sous-domaine-emploi'."
msgstr "Identifiants d'éléments de la source de données 'sous-domaine-emploi'."
#: contrib/toulouse_foederis/models.py
msgid "IDs of elements of the data source 'emploi'."
msgstr "Identifiants d'éléments de la source de données 'emploi'."
#: contrib/toulouse_foederis/models.py
msgid "TC / TNC."
msgstr "Temps de travail désiré (TC / TNC)."
#: contrib/toulouse_foederis/models.py
msgid "Duration of the desired internship."
msgstr "Durée désirée du stage."
#: contrib/toulouse_foederis/models.py
msgid "Candidate trainee's school name."
msgstr "Nom de l'école du candidat stagiaire."
#: contrib/toulouse_foederis/models.py
msgid "Candidate trainee's diploma name."
msgstr "Nom du diplôme du candidat stagiaire."
#: contrib/toulouse_foederis/models.py
msgid "Candidate trainee's diploma speciality."
msgstr "Spécialité du diplôme du candidat stagiaire."
#: contrib/toulouse_foederis/models.py
msgid "ID of an element of the data source 'niveau-diplome'."
msgstr "Identifiant d'un élément de la source de données 'niveau-diplome'."
#: contrib/toulouse_foederis/models.py
msgid "Candidate trainee's last obtained diploma."
msgstr "Dernier diplôme obtenu par le candidat stagiaire."
#: contrib/toulouse_foederis/models.py
msgid "Candidate trainee's last taken course."
msgstr "Dernier cours suivi par le candidat stagiaire."
#: contrib/toulouse_foederis/models.py
msgid "Foederis connector"
msgstr "Connecteur Foederis"
@ -6497,6 +6654,14 @@ msgstr "Ne pas utiliser directement, passer par le champ pdf_url à la place."
msgid "Announce id"
msgstr "Identifiant de l’annonce"
#: contrib/toulouse_foederis/models.py
msgid "Creates an application"
msgstr "Créer une candidature"
#: contrib/toulouse_foederis/models.py
msgid "Attach a file to an application."
msgstr "Attacher un fichier à une candidature."
#: contrib/toulouse_foederis/models.py
msgid "List announces"
msgstr "Lister les annonces"
@ -6793,16 +6958,6 @@ msgstr "Vieilleries"
msgid "Legacy data sources and services"
msgstr "Anciens services et sources de données"
#: templates/passerelle/manage.html
#: templates/passerelle/manage/apiuser_list.html
msgid "Access Management"
msgstr "Gestion des accès"
#: templates/passerelle/manage.html
#: templates/passerelle/manage/service_form.html
msgid "Add Connector"
msgstr "Ajouter un connecteur"
#: templates/passerelle/manage.html
#: templates/passerelle/manage/import_site.html
msgid "Import"
@ -6813,6 +6968,16 @@ msgstr "Importer"
msgid "Export"
msgstr "Exporter"
#: templates/passerelle/manage.html
#: templates/passerelle/manage/apiuser_list.html
msgid "Access Management"
msgstr "Gestion des accès"
#: templates/passerelle/manage.html
#: templates/passerelle/manage/service_form.html
msgid "Add Connector"
msgstr "Ajouter un connecteur"
#: templates/passerelle/manage.html
msgid "open access"
msgstr "accès libre"
@ -6946,14 +7111,14 @@ msgstr "Journaux"
msgid "(supports text search in messages, or dates)"
msgstr "(sur la date ou le texte des messages)"
#: templates/passerelle/manage/service_view.html
msgid "Edit"
msgstr "Modifier"
#: templates/passerelle/manage/service_view.html
msgid "Logging parameters"
msgstr "Paramètres de journalisation"
#: templates/passerelle/manage/service_view.html
msgid "Edit"
msgstr "Modifier"
#: templates/passerelle/manage/service_view.html
msgid "yes"
msgstr "oui"

View File

@ -6,6 +6,7 @@
<activityPortail>
<idAct>A10051141965</idAct>
<libelle>Vitrail Fusing 1/2 Je Adultes 2022/2023 - Mardi 14h-17h</libelle>
<idService>A10049329051</idService>
<dateStart>2022-09-01T00:00:00+02:00</dateStart>
<dateEnd>2023-06-30T00:00:00+02:00</dateEnd>
<birthControl>N</birthControl>
@ -624,6 +625,247 @@
</placeList>
</unitPortailList>
</activityUnitPlacePortailList>
<activityUnitPlacePortailList>
<activityPortail>
<idAct>A10056514645</idAct>
<libelle>TEST ECOLE DES SPORTS 22/23 SEMESTRE 2 - MULTIACTIVITES</libelle>
<blocNoteList>
<note>Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Maecenas porttitor congue massa. Fusce posuere, magna sed pulvinar ultricies, purus lectus malesuada libero, sit amet commodo magna eros quis</note>
<numIndex>1</numIndex>
</blocNoteList>
<dateStart>2023-02-01T00:00:00+01:00</dateStart>
<dateEnd>2023-06-30T00:00:00+02:00</dateEnd>
<birthControl>B</birthControl>
<schoolYear>2022</schoolYear>
<calendarGeneration>
<code>FORBIDDEN</code>
<value>I</value>
</calendarGeneration>
<calendarMode>C</calendarMode>
<activityType>
<code>1-AAQ</code>
<libelle>Activités Aquatiques Activité Réguliére</libelle>
<natureSpec>
<code>1</code>
<libelle>Activités Régulières</libelle>
</natureSpec>
</activityType>
<weeklyCalendarActivityList>
<yearCalendar>2023</yearCalendar>
<dayWeekInfoList>
<dayNum>1</dayNum>
<isOpen>false</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>2</dayNum>
<isOpen>false</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>3</dayNum>
<isOpen>true</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>4</dayNum>
<isOpen>false</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>5</dayNum>
<isOpen>false</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>6</dayNum>
<isOpen>false</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>7</dayNum>
<isOpen>false</isOpen>
</dayWeekInfoList>
</weeklyCalendarActivityList>
</activityPortail>
<openDayList>2023-02-01T00:00:00+01:00</openDayList>
<openDayList>2023-02-08T00:00:00+01:00</openDayList>
<openDayList>2023-02-15T00:00:00+01:00</openDayList>
<openDayList>2023-02-22T00:00:00+01:00</openDayList>
<openDayList>2023-03-01T00:00:00+01:00</openDayList>
<openDayList>2023-03-08T00:00:00+01:00</openDayList>
<openDayList>2023-03-15T00:00:00+01:00</openDayList>
<openDayList>2023-03-22T00:00:00+01:00</openDayList>
<openDayList>2023-03-29T00:00:00+02:00</openDayList>
<openDayList>2023-04-05T00:00:00+02:00</openDayList>
<openDayList>2023-04-12T00:00:00+02:00</openDayList>
<openDayList>2023-04-19T00:00:00+02:00</openDayList>
<openDayList>2023-04-26T00:00:00+02:00</openDayList>
<openDayList>2023-05-03T00:00:00+02:00</openDayList>
<openDayList>2023-05-10T00:00:00+02:00</openDayList>
<openDayList>2023-05-17T00:00:00+02:00</openDayList>
<openDayList>2023-05-24T00:00:00+02:00</openDayList>
<openDayList>2023-05-31T00:00:00+02:00</openDayList>
<openDayList>2023-06-07T00:00:00+02:00</openDayList>
<openDayList>2023-06-14T00:00:00+02:00</openDayList>
<openDayList>2023-06-21T00:00:00+02:00</openDayList>
<openDayList>2023-06-28T00:00:00+02:00</openDayList>
<unitPortailList>
<idUnit>A10056514650</idUnit>
<libelle>MERCREDI - 13h45/17h - 8/15Ans</libelle>
<dateStart>2023-02-01T00:00:00+01:00</dateStart>
<dateEnd>2023-06-30T00:00:00+02:00</dateEnd>
<birthDateStart>2008-01-01T00:00:00+01:00</birthDateStart>
<birthDateEnd>2015-12-31T00:00:00+01:00</birthDateEnd>
<calendarLetter>B</calendarLetter>
<subscribePublication>E</subscribePublication>
<numOrder>0</numOrder>
<calendarPublication>N</calendarPublication>
<recordAbsence>O</recordAbsence>
<placeList>
<id>A10053179757</id>
<lib>ARGOULETS</lib>
<adresse>
<num>0</num>
</adresse>
<startHour>13:45</startHour>
<endHour>17:00</endHour>
<capacityInfo>
<controlOK>true</controlOK>
</capacityInfo>
</placeList>
</unitPortailList>
<unitPortailList>
<idUnit>A10056514648</idUnit>
<libelle>MERCREDI - 14h/16h30 - 10/15Ans</libelle>
<dateStart>2023-02-01T00:00:00+01:00</dateStart>
<dateEnd>2023-06-30T00:00:00+02:00</dateEnd>
<birthDateStart>2008-01-01T00:00:00+01:00</birthDateStart>
<birthDateEnd>2013-12-31T00:00:00+01:00</birthDateEnd>
<calendarLetter>C</calendarLetter>
<subscribePublication>E</subscribePublication>
<numOrder>0</numOrder>
<calendarPublication>N</calendarPublication>
<recordAbsence>O</recordAbsence>
<placeList>
<id>A10053179876</id>
<lib>LA RAMEE</lib>
<adresse>
<num>0</num>
</adresse>
<startHour>14:00</startHour>
<endHour>16:30</endHour>
<capacityInfo>
<controlOK>true</controlOK>
</capacityInfo>
</placeList>
</unitPortailList>
<unitPortailList>
<idUnit>A10056514649</idUnit>
<libelle>MERCREDI - 15h30/17h - 8/15Ans</libelle>
<dateStart>2023-02-01T00:00:00+01:00</dateStart>
<dateEnd>2023-06-30T00:00:00+02:00</dateEnd>
<birthDateStart>2008-01-01T00:00:00+01:00</birthDateStart>
<birthDateEnd>2015-12-31T00:00:00+01:00</birthDateEnd>
<calendarLetter>F</calendarLetter>
<subscribePublication>E</subscribePublication>
<numOrder>0</numOrder>
<calendarPublication>N</calendarPublication>
<recordAbsence>O</recordAbsence>
<placeList>
<id>A10053179757</id>
<lib>ARGOULETS</lib>
<adresse>
<num>0</num>
</adresse>
<startHour>15:30</startHour>
<endHour>17:00</endHour>
<capacityInfo>
<controlOK>true</controlOK>
</capacityInfo>
</placeList>
</unitPortailList>
</activityUnitPlacePortailList>
<activityUnitPlacePortailList>
<activityPortail>
<idAct>A10056517594</idAct>
<libelle>TEST promenade forêt enchantée</libelle>
<libelle2>Promenade forêt enchantée</libelle2>
<blocNoteList>
<note>Activité senior du 15 au 16 juin 2023</note>
<numIndex>1</numIndex>
</blocNoteList>
<idService>plop</idService>
<dateStart>2023-06-15T00:00:00+02:00</dateStart>
<dateEnd>2023-06-16T00:00:00+02:00</dateEnd>
<birthControl>B</birthControl>
<schoolYear>2022</schoolYear>
<calendarGeneration>
<code>FORBIDDEN</code>
<value>I</value>
</calendarGeneration>
<calendarMode>N</calendarMode>
<activityType>
<code>1-APE</code>
<libelle>Activité Pédestre Activité régulière</libelle>
<natureSpec>
<code>1</code>
<libelle>Activités Régulières</libelle>
</natureSpec>
</activityType>
<weeklyCalendarActivityList>
<yearCalendar>2023</yearCalendar>
<dayWeekInfoList>
<dayNum>1</dayNum>
<isOpen>true</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>2</dayNum>
<isOpen>true</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>3</dayNum>
<isOpen>true</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>4</dayNum>
<isOpen>true</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>5</dayNum>
<isOpen>true</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>6</dayNum>
<isOpen>false</isOpen>
</dayWeekInfoList>
<dayWeekInfoList>
<dayNum>7</dayNum>
<isOpen>false</isOpen>
</dayWeekInfoList>
</weeklyCalendarActivityList>
</activityPortail>
<openDayList>2023-06-15T00:00:00+02:00</openDayList>
<openDayList>2023-06-16T00:00:00+02:00</openDayList>
<unitPortailList>
<idUnit>A10056517595</idUnit>
<libelle>TEST promenade forêt enchantée</libelle>
<codeExt>A</codeExt>
<dateStart>2023-06-15T00:00:00+02:00</dateStart>
<dateEnd>2023-06-16T00:00:00+02:00</dateEnd>
<birthDateStart>1900-01-01T00:00:00+01:00</birthDateStart>
<birthDateEnd>1963-12-31T00:00:00+01:00</birthDateEnd>
<calendarLetter>X</calendarLetter>
<subscribePublication>E</subscribePublication>
<numOrder>0</numOrder>
<calendarPublication>N</calendarPublication>
<recordAbsence>O</recordAbsence>
<placeList>
<id>A10056517597</id>
<lib>TERRITOIRE OUEST</lib>
<adresse>
<num>0</num>
</adresse>
<capacityInfo>
<controlOK>true</controlOK>
</capacityInfo>
</placeList>
</unitPortailList>
</activityUnitPlacePortailList>
</ReadActivityPortailListResultBean>
</ns2:readActivityListResponse>
</soap:Body>

View File

@ -31,11 +31,26 @@ def test_new(app, connector):
},
'recipients_emails/0': 'foo@invalid',
'recipients_emails/1': 'bar@invalid',
'eppn': 'baz@invalid',
'create_by_eppn': 'baz@invalid',
'title': 'a title',
}
with responses.RequestsMock() as rsps:
rsps.post('https://esup-signature.invalid/ws/signrequests/new', status=200, json=9)
query_params = {
'recipientsEmails': ['foo@invalid', 'bar@invalid'],
'createByEppn': 'baz@invalid',
'title': 'a title',
'signType': 'pdfImageStamp',
'pending': True,
'allSignToComplete': False,
'userSignFirst': False,
'forceAllSign': False,
}
rsps.post(
'https://esup-signature.invalid/ws/signrequests/new',
match=[responses.matchers.query_param_matcher(query_params)],
status=200,
json=9,
)
resp = app.post_json('/esup-signature/esup-signature/new', params=params)
assert len(rsps.calls) == 1
assert rsps.calls[0].request.headers['Content-Type'].startswith('multipart/form-data')
@ -44,6 +59,54 @@ def test_new(app, connector):
assert json_resp['data'] == 9
def test_new_with_workflow(app, connector):
    """The new-with-workflow endpoint must forward all workflow parameters to
    esup-signature through the query string, as a multipart POST on
    /ws/workflows/<id>/new."""
    payload = {
        'file': {
            'filename': 'bla',
            'content': base64.b64encode(b'who what').decode(),
            'content_type': 'text/plain',
        },
        'workflow_id': '99',
        'eppn': 'aa@foo.com',
        'title': 'a title',
        'recipients_emails/0': '0*xx@foo.com',
        'recipients_emails/1': '0*yy@foo.com',
        'recipients_emails/2': '1*zz@foo.com',
        'all_sign_to_completes/0': '12',
        'all_sign_to_completes/1': '13',
        'target_emails/0': 'xx@foo.com',
        'target_emails/1': 'yy@foo.com',
        'target_emails/2': 'zz@foo.com',
        'signrequest_params_jsonstring': 'List [ OrderedMap { "xPos": 100, "yPos": 100, "signPageNumber": 1 }, '
        'OrderedMap { "xPos": 200, "yPos": 200, "signPageNumber": 1 } ]',
        'target_urls/0': 'smb://foo.bar/location-1/',
        'target_urls/1': 'smb://foo.bar/location-2/',
    }
    with responses.RequestsMock() as rsps:
        # Each connector parameter must be mapped to its camelCase
        # esup-signature counterpart in the query string.
        expected_query = {
            'createByEppn': 'aa@foo.com',
            'title': 'a title',
            'recipientsEmails': ['0*xx@foo.com', '0*yy@foo.com', '1*zz@foo.com'],
            'allSignToCompletes': ['12', '13'],
            'targetEmails': ['xx@foo.com', 'yy@foo.com', 'zz@foo.com'],
            'signRequestParamsJsonString': 'List [ OrderedMap { "xPos": 100, "yPos": 100, "signPageNumber": 1 }, '
            'OrderedMap { "xPos": 200, "yPos": 200, "signPageNumber": 1 } ]',
            'targetUrls': ['smb://foo.bar/location-1/', 'smb://foo.bar/location-2/'],
        }
        rsps.post(
            'https://esup-signature.invalid/ws/workflows/99/new',
            match=[responses.matchers.query_param_matcher(expected_query)],
            status=200,
            json=9,
        )
        resp = app.post_json('/esup-signature/esup-signature/new-with-workflow', params=payload)
        assert len(rsps.calls) == 1
        # The file itself travels in the multipart body, not the query string.
        assert rsps.calls[0].request.headers['Content-Type'].startswith('multipart/form-data')
    body = resp.json
    assert body['err'] == 0
    assert body['data'] == 9
def test_status(app, connector):
with responses.RequestsMock() as rsps:
rsps.get('https://esup-signature.invalid/ws/signrequests/1', status=200, json={'status': 'completed'})
@ -76,3 +139,19 @@ def test_get_last_file(app, connector):
)
resp = app.get('/esup-signature/esup-signature/get-last-file?signrequests_id=1')
assert resp.text == 'who hwat'
def test_forced_headers(app, connector):
    """Headers configured on the connector must be added to outgoing requests."""
    connector.forced_headers = 'X-Foo:bar'
    connector.save()
    with responses.RequestsMock() as rsps:
        upstream_headers = {'Content-Type': 'text/plain', 'Content-Disposition': 'attachment; filename=foo.txt'}
        rsps.get(
            'https://esup-signature.invalid/ws/signrequests/get-last-file/1',
            status=200,
            body='who hwat',
            headers=upstream_headers,
        )
        app.get('/esup-signature/esup-signature/get-last-file?signrequests_id=1')
        # The configured header must reach the esup-signature server.
        sent_headers = rsps.calls[0].request.headers
        assert sent_headers['X-Foo'] == 'bar'

View File

@ -5705,6 +5705,38 @@ def test_update_recurrent_week(family_service, activity_service, con, app):
assert resp.json['data'] == 'ok'
def test_update_recurrent_week_empty(family_service, activity_service, con, app):
    """An empty recurrent_week must be accepted and translated into a full
    week of absent days in the SOAP updateWeekCalendar call."""

    def request_check(request):
        # All 7 days are sent, each marked not present.
        expected_week = [
            {'dayNum': day, 'isPresent': False, 'isOpen': None, 'calendarLetter': None}
            for day in range(1, 8)
        ]
        assert serialize_object(request.dayWeekInfoList) == expected_week

    family_service.add_soap_response('readFamily', get_xml_file('R_read_family.xml'))
    activity_service.add_soap_response(
        'updateWeekCalendar',
        get_xml_file('R_update_week_calendar.xml'),
        request_check=request_check,
    )
    url = get_endpoint('update-recurrent-week')
    payload = {
        'person_id': '613880',
        'activity_id': 'A10049327682',
        'start_date': '2023-04-01',
        'end_date': '2023-04-30',
        'recurrent_week': '',
    }
    resp = app.post_json(url + '?family_id=311323', params=payload)
    assert resp.json['err'] == 0
    assert resp.json['data'] == 'ok'
def test_update_recurrent_week_not_linked_error(con, app):
url = get_endpoint('update-recurrent-week')
params = {
@ -5940,12 +5972,29 @@ def test_read_activity_list(activity_service, con, app):
activity_service.add_soap_response('readActivityList', get_xml_file('R_read_activity_list.xml'))
url = get_endpoint('read-activity-list')
con.loisir_nature_codes = '4,L,, S '
con.loisir_nature_codes = '1,4,L,, S '
con.save()
params = {'ref_date': '2023-01-01'}
resp = app.get(url, params=params)
assert resp.json['err'] == 0
assert len(resp.json['data']) == 4
assert len(resp.json['data']) == 8
assert [
(
x['id'],
x['activity']['activityPortail']['idService'],
x['activity']['activityPortail']['idService_text'],
)
for x in resp.json['data']
] == [
('A10051141965-A10051141966-A10053179226', 'A10049329051', 'Sorties'),
('A10051141965-A10051141968-A10053179226', 'A10049329051', 'Sorties'),
('A10051141965-A10051141970-A10053179226', 'A10049329051', 'Sorties'),
('A10051141965-A10051141990-A10053179227', 'A10049329051', 'Sorties'),
('A10056514645-A10056514650-A10053179757', None, None),
('A10056514645-A10056514648-A10053179876', None, None),
('A10056514645-A10056514649-A10053179757', None, None),
('A10056517594-A10056517595-A10056517597', 'plop', None),
]
item = resp.json['data'][0]
item['activity'] = 'N/A'
item['unit'] = 'N/A'
@ -5959,6 +6008,11 @@ def test_read_activity_list(activity_service, con, app):
'unit': 'N/A',
'place': 'N/A',
'criterias': {
'service': {
'text': 'Service',
'data': {'A10049329051': 'Sorties'},
'order': ['A10049329051'],
},
'nature': {'text': "Nature de l'activité", 'data': {'4': 'ART PLASTIQUE'}, 'order': ['4']},
'type': {
'text': "Type de l'activité",
@ -5985,6 +6039,59 @@ def test_read_activity_list(activity_service, con, app):
'day': {'text': 'Jours', 'data': {'2': 'Mardi'}, 'order': ['2']},
},
}
assert resp.json['meta'] == {
'reference_year': 2022,
'all_criterias': {
'service': {'text': 'Service', 'data': {'A10049329051': 'Sorties'}, 'order': ['A10049329051']},
'nature': {
'text': "Nature de l'activité",
'data': {'4': 'ART PLASTIQUE', '1': 'Activités Régulières'},
'order': ['1', '4'],
},
'type': {
'text': "Type de l'activité",
'data': {
'activite-reguliere': 'ACTIVITE REGULIERE',
'activites-aquatiques-activite-reguliere': 'Activités Aquatiques Activité Réguliére',
'activite-pedestre-activite-reguliere': 'Activité Pédestre Activité régulière',
},
'order': [
'activite-reguliere',
'activite-pedestre-activite-reguliere',
'activites-aquatiques-activite-reguliere',
],
},
'public': {
'text': 'Public',
'data': {
'0': 'Petit enfant (- de 3 ans)',
'1': 'Enfant (3-11 ans)',
'2': 'Ado (12-17 ans)',
'3': 'Jeune (18-25 ans)',
'4': 'Adulte (26-59 ans)',
'5': 'Sénior (60 ans et plus)',
},
'order': ['0', '1', '2', '3', '4', '5'],
},
'day': {
'text': 'Jours',
'data': {'2': 'Mardi', '3': 'Mercredi', '1': 'Lundi', '4': 'Jeudi', '5': 'Vendredi'},
'order': ['1', '2', '3', '4', '5'],
},
'place': {
'text': 'Lieu',
'data': {
'A10053179226': 'Centre Culturel ALBAN MINVILLE',
'A10053179227': 'Un autre centre culturel',
'A10053179757': 'ARGOULETS',
'A10053179876': 'LA RAMEE',
'A10056517597': 'TERRITOIRE OUEST',
},
'order': ['A10053179757', 'A10053179226', 'A10053179876', 'A10056517597', 'A10053179227'],
},
},
'all_criterias_order': ['service', 'nature', 'type', 'public', 'day', 'place'],
}
# make sure activities have a single place defined
for item in resp.json['data']:
@ -6000,13 +6107,14 @@ def test_read_activity_list(activity_service, con, app):
'meta': {
'reference_year': 2022,
'all_criterias': {
'service': {'text': 'Service', 'data': {}, 'order': []},
'nature': {'text': "Nature de l'activité", 'data': {}, 'order': []},
'type': {'text': "Type de l'activité", 'data': {}, 'order': []},
'public': {'text': 'Public', 'data': {}, 'order': []},
'day': {'text': 'Jours', 'data': {}, 'order': []},
'place': {'text': 'Lieu', 'data': {}, 'order': []},
},
'all_criterias_order': ['nature', 'type', 'public', 'day', 'place'],
'all_criterias_order': ['service', 'nature', 'type', 'public', 'day', 'place'],
},
'err': 0,
}
@ -6040,7 +6148,7 @@ def test_read_activity_list_cache(mocked_get, con, app):
)
resp = app.get(url, params=params)
assert resp.json['err'] == 0
assert len(resp.json['data']) == 4
assert len(resp.json['data']) == 8
assert [x.item_id for x in Referential.objects.filter(referential_name='ActivityCatalog')] == [
'1969',
@ -6052,7 +6160,7 @@ def test_read_activity_list_cache(mocked_get, con, app):
mocked_post.side_effect = ReadTimeout('timeout')
resp = app.get(url, params=params)
assert resp.json['err'] == 0
assert len(resp.json['data']) == 4
assert len(resp.json['data']) == 8
@mock.patch('passerelle.utils.Request.get')
@ -6067,8 +6175,9 @@ def test_read_activity_list_no_nature(mocked_post, mocked_get, con, app):
def mocked_reply(referential_name, id=None):
queryset = con.referential.filter(referential_name=referential_name, item_id=id)
data = [x.item_data for x in queryset]
activity = data[0]['data'][0]
activity['activityPortail']['activityType'] = None
for item in data[0]['data']:
activity = item
activity['activityPortail']['activityType'] = None
return data
with mock.patch('passerelle.contrib.toulouse_maelis.models.ToulouseMaelis.get_referential') as mocked_ref:
@ -7622,7 +7731,7 @@ def test_add_person_basket_subscription_with_recurrent_week(family_service, acti
family_service.add_soap_response('readFamily', get_xml_file('R_read_family.xml'))
activity_service.add_soap_response(
'getPersonUnitInfo',
get_xml_file('R_get_person_unit_info_with_conveyance.xml'),
get_xml_file('R_get_person_unit_info_with_recurrent_week.xml'),
)
activity_service.add_soap_response(
'addPersonUnitBasket',
@ -7648,6 +7757,35 @@ def test_add_person_basket_subscription_with_recurrent_week(family_service, acti
assert resp.json['err'] == 0
def test_add_person_basket_subscription_with_recurrent_week_empty(family_service, activity_service, con, app):
    """Basket subscription with an empty recurrent_week must send an empty
    dayWeekInfoList in the addPersonUnitBasket SOAP call."""

    def request_check(request):
        assert serialize_object(request.dayWeekInfoList) == []

    family_service.add_soap_response('readFamily', get_xml_file('R_read_family.xml'))
    activity_service.add_soap_response(
        'getPersonUnitInfo',
        get_xml_file('R_get_person_unit_info_with_recurrent_week.xml'),
    )
    activity_service.add_soap_response(
        'addPersonUnitBasket',
        get_xml_file('R_add_person_unit_basket.xml'),
        request_check=request_check,
    )
    url = get_endpoint('add-person-basket-subscription')
    payload = {
        'person_id': '613880',
        'activity_id': 'A10051141965',
        'unit_id': 'A10051141990',
        'place_id': 'A10053179226',
        'start_date': '2022-09-01',
        'end_date': '2023-08-31',
        'recurrent_week': '',
    }
    resp = app.post_json(url + '?family_id=311323', params=payload)
    assert resp.json['err'] == 0
def test_basket_subscription_providing_wcs_demand(family_service, activity_service, con, app):
family_service.add_soap_response('readFamily', get_xml_file('R_read_family_for_subscription.xml'))
activity_service.add_soap_response('getPersonUnitInfo', get_xml_file('R_get_person_unit_info.xml'))
@ -7904,7 +8042,7 @@ def test_add_person_subscription_with_recurrent_week(family_service, activity_se
family_service.add_soap_response('readFamily', get_xml_file('R_read_family.xml'))
activity_service.add_soap_response(
'getPersonUnitInfo',
get_xml_file('R_get_person_unit_info_with_conveyance.xml'),
get_xml_file('R_get_person_unit_info_with_recurrent_week.xml'),
)
activity_service.add_soap_response(
'addPersonUnitSubscribe',
@ -7930,6 +8068,35 @@ def test_add_person_subscription_with_recurrent_week(family_service, activity_se
assert resp.json['err'] == 0
def test_add_person_subscription_with_recurrent_week_empty(family_service, activity_service, con, app):
    """Direct subscription with an empty recurrent_week must send an empty
    dayWeekInfoList in the addPersonUnitSubscribe SOAP call."""

    def request_check(request):
        assert serialize_object(request.dayWeekInfoList) == []

    family_service.add_soap_response('readFamily', get_xml_file('R_read_family.xml'))
    activity_service.add_soap_response(
        'getPersonUnitInfo',
        get_xml_file('R_get_person_unit_info_with_recurrent_week.xml'),
    )
    activity_service.add_soap_response(
        'addPersonUnitSubscribe',
        get_xml_file('R_add_person_unit_subscribe.xml'),
        request_check=request_check,
    )
    url = get_endpoint('add-person-subscription')
    payload = {
        'person_id': '613880',
        'activity_id': 'A10051141965',
        'unit_id': 'A10051141990',
        'place_id': 'A10053179226',
        'start_date': '2022-09-01',
        'end_date': '2023-08-31',
        'recurrent_week': '',
    }
    resp = app.post_json(url + '?family_id=311323', params=payload)
    assert resp.json['err'] == 0
def test_add_person_subscription_with_conveyance(family_service, activity_service, con, app):
def request_check(request):
assert serialize_object(request.conveyanceSubscribe) == {