passerelle/passerelle/contrib/solis_apa/models.py

# Passerelle - uniform access to data and services
# Copyright (C) 2015 Entr'ouvert
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import re
from urllib import parse as urlparse

from django.core.cache import cache
from django.db import models
from django.utils.translation import ugettext_lazy as _

from passerelle.base.models import BaseResource
from passerelle.contrib.solis_apa import conciliation, integration, suivi
from passerelle.utils.jsonresponse import APIError

HEADERS = {'Accept': 'application/json', 'Content-Type': 'application/json'}
APPLICATION = 'AsgTeleprocedureApa14'


class SolisAPA(BaseResource):
    base_url = models.CharField(max_length=128, blank=False, verbose_name=_('url'))
    verify_cert = models.BooleanField(default=True, verbose_name=_('Check HTTPS Certificate validity'))
    username = models.CharField(max_length=128, blank=True, verbose_name=_('Username'))
    password = models.CharField(max_length=128, blank=True, verbose_name=_('Password'))
    keystore = models.FileField(
        upload_to='solis_apa',
        null=True,
        blank=True,
        verbose_name=_('Keystore'),
        help_text=_('Certificate and private key in PEM format'),
    )

    category = _('Business Process Connectors')

    class Meta:
        verbose_name = _('Solis (legacy)')

    @classmethod
    def get_verbose_name(cls):
        return cls._meta.verbose_name

    def check_status(self):
        self.get_communes(query='', use_cache=False)

    def _check_requests_response(self, response):
        try:
            ret = response.json()
            return ret
        except ValueError:
            raise APIError('Response content is not valid JSON')

    def get_resource_url(self, uri):
        return urlparse.urljoin(self.base_url, uri)
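
    # Note: the request and response shapes described below are inferred from the
    # parsing code in this module, not from Solis documentation. The referential
    # web service is POSTed a {'ReferentialOptions': ...} JSON document and is
    # expected to answer with
    # {'ReferentialOutputWS': {'Entries': {'@count': ..., '@name': ..., 'Entry': ...}}}.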

    # Referential methods
    def _referential(
        self, referential, keys=True, order_by=False, stop_on_error=False, attributes=None, **filters
    ):
        attributes = attributes or []
        uri = 'referential?referential=%s' % referential
        url = self.get_resource_url(uri)
        data = {
            'ReferentialOptions': {
                'processKeys': keys,
                'processOrderBy': order_by,
                'stopOnError': stop_on_error,
            }
        }
        if filters:
            solis_filters = []
            for k, v in filters.items():
                solis_filters.append({'key': k, 'value': v})
            data['ReferentialOptions']['Filters'] = {'Filter': solis_filters}
        if attributes:
            data['ReferentialOptions']['Attributes'] = {
                "referential": [
                    {
                        "schema": "stdr",
                        "table": referential,
                        "field": attributes,
                    }
                ]
            }
        data = json.dumps(data)
        response = self.requests.post(url, data=data, headers=HEADERS)
        if response.status_code != 200:
            raise APIError('referential ws: error code %d' % response.status_code)
        ret = self._check_requests_response(response)
        l = []
        count = int(ret['ReferentialOutputWS']['Entries']['@count'])
        name = ret['ReferentialOutputWS']['Entries']['@name']
        if count:
            entries = ret['ReferentialOutputWS']['Entries']['Entry']
            if not isinstance(entries, list):
                entries = [entries]
            l += entries
        return {'results': l, 'name': name, 'count': count, 'error': False}
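
    # The conciliation web service (payload built by conciliation.conciliation_payload)
    # answers with {'ConciliationOutputWS': {'Results': {'@count': ...,
    # 'ResultsByAffinity': ...}}}; each result carries an '@affinity' score and a list
    # of entities that conciliation.conciliation_output2dict flattens into plain dicts.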

    def _conciliation(self, config, **data):
        uri = 'conciliation'
        url = self.get_resource_url(uri)
        name = config['block']['name'].lower()
        data = json.dumps(conciliation.conciliation_payload(config, **data))
        response = self.requests.post(url, data=data, headers=HEADERS)
        if response.status_code != 200:
            raise APIError('conciliation ws: error code %d' % response.status_code)
        ret = self._check_requests_response(response)
        l = []
        count = int(ret['ConciliationOutputWS']['Results']['@count'])
        if count:
            results = ret['ConciliationOutputWS']['Results']['ResultsByAffinity']
            if not isinstance(results, list):
                results = [results]
            for r in results:
                affinity = r['@affinity']
                entities = r['Entities']['entity']
                if not isinstance(entities, list):
                    entities = [entities]
                for e in entities:
                    e = conciliation.conciliation_output2dict(config, e)
                    e['@affinity'] = affinity
                    l.append(e)
        return {'results': l, 'name': name, 'count': count, 'error': False}
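
    # get_communes() returns select2-style entries ({'id': ..., 'text': ...}) for the
    # communes of a department; for instance (hypothetical value), a query starting
    # with '14' is treated as the beginning of a postal code and overrides code_dep.
    # The referential lookup is cached for an hour per department.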

    def get_communes(self, query, code_dep=14, use_cache=True):
        if query:
            query = query.lower()
            if re.match(r'^\d\d', query):
                # query is the beginning of a postal code
                code_dep = query[:2]
        if not code_dep:
            return {}
        cache_key = 'solis-liste-communes-%s' % code_dep
        ref = cache.get(cache_key)
        if not ref or not use_cache:
            ref = self._referential(referential='commune', attributes=['cp_lieu'], code_dep=code_dep)
            if use_cache:
                cache.set(cache_key, ref, 60 * 60)
        villes = ref.get('results')
        ret = []
        for v in villes:
            # {'Attributes': {'Attribute': {'id': 'stdr.commune.cp_lieu',
            #  'value': 14210}}, 'Keys': {'Key': [{'id':
            #  'stdr.commune.code_a_com', 'value': 771}, {'id':
            #  'stdr.commune.code_dep', 'value': 14}]}, 'id':
            #  'commune-14-771', 'value': 'NEUILLY LE MALHERBE'},
            #
            # However, some entries have been observed to lack information,
            # hence the checks and "continue" statements below.
            if ('value' not in v) or ('id' not in v):
                continue
            attrs = {}
            for attr in v['Attributes']['Attribute']:
                attrs[attr['id']] = attr['value']
            if 'stdr.commune.cp_lieu' not in attrs:
                continue
            text = '%0.5d %s' % (attrs['stdr.commune.cp_lieu'], v['value'].strip())
            if query and query not in text.lower():
                continue
            ret.append({'id': v['id'], 'text': text})
        return ret

    def _cache(self, key, value=None):
        if value:
            cache.set(key, value, 60 * 60)
            return True
        cache_data = cache.get(key)
        if cache_data:
            return cache_data
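
    # get_lieux() looks up streets ("lieux") of a commune through the address
    # conciliation block; q is wrapped in '%' wildcards, and each result is returned
    # as {'id': <CodeLieu>, 'text': '<nature> <name>', 'affinity': <score>}.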

    def get_lieux(self, q, commune, departement):
        # if commune is a Solis code of the form commune-<dep>-<com>
        if commune and commune.startswith('commune-'):
            dummy, departement, commune = commune.split('-')
        call = self._conciliation(
            conciliation.CONCILIATION_ADRESSE, commune=commune, departement=departement, lieu='%%%s%%' % q
        )
        lieux = call.get('results')
        ret = []
        for l in lieux:
            # '@affinity': '5',
            # 'CodeDepartement/@V': '',
            # 'CodeLieu/@V': '0110',
            # 'CodePostal/@V': '14000',
            # 'Commune/NomCom/@V': 'CAEN',
            # 'Commune/PK/CodeCommune/@V': '118',
            # 'NatureLieu/@Lc': 'RUE',
            # 'NomLieu/@V': 'DU BEAU SITE'
            for k, v in l.items():
                l[k] = v.strip()
            ret.append(
                {
                    'id': '%(CodeLieu/@V)s' % l,
                    'text': '%(NatureLieu/@Lc)s %(NomLieu/@V)s' % l,
                    'affinity': '%(@affinity)s' % l,
                }
            )
        return ret
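
    # get_homonymes() searches for matching individuals. The dn parameter is expected
    # in day-first form (e.g., the hypothetical '28/08/1933' becomes '1933-08-28')
    # before the CONCILIATION_INDIVIDU search; results are sorted by decreasing affinity.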

    def get_homonymes(self, nom, prenom, dn):
        if dn:
            dn = dn[6:] + '-' + dn[3:5] + '-' + dn[:2]
            call = self._conciliation(conciliation.CONCILIATION_INDIVIDU, nom=nom, prenom=prenom, dn=dn)
        else:
            call = self._conciliation(conciliation.CONCILIATION_INDIVIDU_SANS_DN, nom=nom, prenom=prenom)
        individus = call.get('results')
        ret = []
        for i in individus:
            # i = {'@affinity': '3',
            #      'Dossier/Adresse/Commune/NomCom/@V': 'ST JULIEN EN GENEVOIS',
            #      'Dossier/Adresse/ComplementLieu/@V': 'ROUTE DE THOIRY',
            #      'Dossier/Adresse/CpLieu/@V': '74160',
            #      'Dossier/Adresse/NatureLieu/@Lc': '',
            #      'Dossier/Adresse/NomLieu/@V': '.',
            #      'Dossier/Adresse/NumeroLieu/@V': '39',
            #      'Dossier/PK/IndexDossier/@V': '162438',
            #      'EtatCivil/DateNaissance/@V': '1933-08-28',
            #      'EtatCivil/Nom/@V': 'DUPONT',
            #      'EtatCivil/NomJeuneFille/@V': 'BUATHIER',
            #      'EtatCivil/Prenom/@V': 'JEANNE',
            #      'PK/IndexIndividu/@V': '208359'},
            for k, v in i.items():
                i[k] = v.strip()
            njf = i['EtatCivil/NomJeuneFille/@V']
            if njf:
                i['EtatCivil/NomJeuneFille/@V'] = ' (%s)' % njf
            if not i['EtatCivil/DateNaissance/@V']:
                i['EtatCivil/DateNaissance/@V'] = 'date de naissance inconnue'
            ret.append(
                {
                    'id': '%(PK/IndexIndividu/@V)s' % i,
                    'text': (
                        '%(EtatCivil/Nom/@V)s%(EtatCivil/NomJeuneFille/@V)s %(EtatCivil/Prenom/@V)s'
                        + ' - %(EtatCivil/DateNaissance/@V)s'
                        + ' - %(Dossier/Adresse/CpLieu/@V)s %(Dossier/Adresse/Commune/NomCom/@V)s'
                    )
                    % i,
                    'affinity': '%(@affinity)s' % i,
                }
            )
        ret.sort(key=lambda x: x['affinity'])
        ret.reverse()
        return ret
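
    # Simple referentials ("common refs") are fetched once, cached for an hour through
    # _cache(), then filtered in Python on the optional q substring.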

    def _process_common_ref(self, ref_name, q=None):
        cache_key = 'solis-apa-%s' % ref_name.replace(' ', '-')
        ref = self._cache(cache_key)
        if not ref:
            ref = self._referential(ref_name)
            self._cache(cache_key, ref)
        ret = []
        for result in ref.get('results'):
            if result.get('value') and (not q or q.lower() in result['value'].lower()):
                ret.append({'id': result['id'], 'text': result['value']})
        return ret

    def get_referential(self, reference_name, q=None):
        return self._process_common_ref(reference_name.replace('-', ' '), q=q)
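
    # get_suivi() drives the exportFlow endpoint; the suivi_type slug is mapped to a
    # Solis export flow name below, and payload building and response parsing are
    # delegated to the suivi module (render_payload / suivi_output).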

    def get_suivi(self, suivi_type, datedebut, datefin):
        resource = {
            'visite': 'ExportSuiviVisite',
            'plan-aide': 'ExportSuiviPlanAide',
            'presentation-commission': 'ExportSuiviPresentationCommission',
            'decision-commission': 'ExportSuiviDecisionCommission',
        }
        uri = 'exportFlow?flow={}&application={}'.format(resource[suivi_type], APPLICATION)
        url = self.get_resource_url(uri)
        payload = suivi.render_payload(suivi_type, datedebut, datefin)
        payload = json.dumps(payload)
        response = self.requests.post(url, data=payload, headers=HEADERS, timeout=300)
        if response.status_code != 200:
            raise APIError('suivi %s ws: error code %d' % (suivi_type, response.status_code))
        response = self._check_requests_response(response)
        output = suivi.suivi_output(suivi_type, response)
        return output
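
    # import_flow() posts an APA demand to the importFlow endpoint; the message is
    # built by integration.build_message() and the answer is expected to contain
    # {'ImportIdResults': {'Items': [{'key': ..., 'value': ...}, ...]}}, returned
    # here as a flat dict.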

    def import_flow(self, data):
        uri = 'importFlow?flow=ImportIntegrationDemande&application=%s' % APPLICATION
        url = self.get_resource_url(uri)
        data = {'ImportInputWSDemandeApa': integration.build_message(json.loads(data))}
        data = json.dumps(data)
        self.logger.debug('Demande APA: %s' % data, extra={'solis_apa_demande': data})
        response = self.requests.post(url, data=data, headers=HEADERS)
        if response.status_code != 200:
            raise APIError('integration ws: error code %d' % response.status_code)
        response = self._check_requests_response(response)
        ret = {}
        for x in response['ImportIdResults']['Items']:
            ret[x['key']] = x['value']
        return ret