# Source file: passerelle-reunion-fsn/passerelle_reunion_fsn/models.py
# (upstream repository archived on 2024-02-02: read-only, no issues or pull requests)
# -*- coding: utf-8 -*-
# passerelle-reunion-fsn
# Copyright (C) 2020 Entr'ouvert
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import base64
import csv
import hashlib
from io import BytesIO
from django.core.urlresolvers import reverse
from django.db import models, transaction
from django.http import FileResponse
from django.utils.dateparse import parse_datetime
from django.utils.encoding import force_str, smart_text
from django.utils.translation import ugettext_lazy as _
from passerelle.base.models import BaseResource
from passerelle.utils.api import endpoint
from passerelle.utils.jsonresponse import APIError
# Ordered column names of the ';'-separated CSV files attached to DS dossiers.
# The order must match the CSV column order exactly: rows are zipped against
# this list positionally (see DSDossier.get_rows), and each name is also an
# Entreprise model field the parsed values are upserted into.
COLUMNS_KEYNAMES = [
    'code_application',
    'sequence',
    'periode',
    'siren',
    'nom1',
    'nom2',
    'nb_salarie',
    'rue',
    'boite_postale',
    'code_postale',
    'ville',
    'code_pays',
    'region',
    'iban',
    'montant',
    'devise',
    'domaine_activite',
    'societe',
    'nb_dp',
    'date_dp',
    'date_paiement',
    'nom_demandeur',
    'prenom_demandeur',
    'qualite',
    'tel',
    'courriel',
]
def csv_file_location(instance, filename):
    """Return the upload path for a dossier's CSV, namespaced by its DS id."""
    return 'fsn_reunion/{0}/{1}'.format(instance.ds_id, filename)
class FSNReunionConnector(BaseResource):
    """Fetch 'Fonds de Solidarité National' dossiers for La Réunion from the
    Démarches Simplifiées (DS) GraphQL API, store the CSV file attached to
    each dossier, and consolidate its rows into local Entreprise records.
    """

    category = _('Business Process Connectors')

    class Meta:
        verbose_name = _('FSN Reunion')

    api_url = models.URLField(max_length=400, verbose_name=_('DS API URL'))
    token = models.CharField(max_length=256, verbose_name=_('DS token'))
    demarche_number = models.IntegerField(verbose_name=_('Demarche number'))
    instructeur_id = models.CharField(
        max_length=256, blank=True, verbose_name=_('Instructeur identifier'),
        help_text=_('Region identifier for this case')
    )

    def _ds_call(self, query, variables):
        """POST a GraphQL query to the DS API and return its 'data' payload.

        Raises APIError on a non-200 HTTP status or when the GraphQL response
        carries an 'errors' array (the first available message is surfaced).
        """
        headers = {
            'Authorization': 'Bearer token=' + self.token
        }
        data = {
            'query': query,
            'variables': variables
        }
        response = self.requests.post(self.api_url, headers=headers, json=data)
        if response.status_code != 200:
            # typo fix: 'occured' -> 'occurred'
            raise APIError('An error occurred, status code : %s' % response.status_code)
        json = response.json()
        if 'errors' in json:
            msg = 'Unknown error'
            for error in json['errors']:
                if 'message' in error:
                    msg = error['message']
                    break
            raise APIError('An error occurred : %s' % msg)
        return json['data']

    @endpoint(
        methods=['get'], perm='can_access', name='dsproxy-get-instructeurs',
        description=_('DS Proxy: get instructeurs')
    )
    def dsproxy_get_instructeurs(self, request):
        """Proxy endpoint listing the 'groupes instructeurs' of the demarche."""
        query = '''
query getInstructeurs($demarcheNumber: Int!){
    demarche(number: $demarcheNumber) {
        groupeInstructeurs {
            id
            label
        }
    }
}
'''
        variables = {
            'demarcheNumber': self.demarche_number
        }
        return {
            'data': self._ds_call(query, variables)
        }

    @endpoint(
        methods=['get'], perm='can_access', name='dsproxy-get-liste-champs',
        description=_('DS Proxy: get fields identifiers'),
    )
    def dsproxy_get_liste_champs(self, request):
        """Proxy endpoint listing the field (champ) descriptors of the demarche."""
        query = '''
query getChampDescriptors($demarcheNumber: Int!) {
    demarche(number: $demarcheNumber) {
        champDescriptors {
            id
            label
            type
            required
        }
    }
}
'''
        variables = {
            'demarcheNumber': self.demarche_number
        }
        return {
            'data': self._ds_call(query, variables)
        }

    def _ds_get_dossiers(self):
        """Fetch all dossiers of the demarche, following GraphQL pagination.

        Only dossiers belonging to the configured 'groupe instructeur' are
        kept; the result is sorted by 'datePassageEnConstruction' ascending.
        """
        if not self.instructeur_id:
            # typo fix: 'identifer' -> 'identifier'
            raise APIError('Need an instructeur identifier')
        query = '''
query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: Int, $after: String) {
    demarche(number: $demarcheNumber) {
        number
        dossiers(first: $first, after: $after, createdSince: $createdSince) {
            pageInfo {
                hasNextPage
                endCursor
            }
            nodes {
                id
                number
                state
                datePassageEnConstruction
                datePassageEnInstruction
                dateTraitement
                dateDerniereModification
                usager {
                    email
                }
                groupeInstructeur {
                    id
                }
                demandeur {
                    ... on PersonneMorale {
                        siret
                        entreprise {
                            siren
                            raisonSociale
                        }
                    }
                }
                messages {
                    email
                    body
                    createdAt
                    attachment {
                        url
                        filename
                        contentType
                        byteSize
                        checksum
                    }
                }
                champs {
                    id
                    label
                    ... on TextChamp {
                        value
                    }
                    ... on DateChamp {
                        value
                    }
                    ... on PieceJustificativeChamp {
                        file {
                            url
                            filename
                            contentType
                            byteSize
                            checksum
                        }
                    }
                }
            }
        }
    }
}
'''
        variables = {
            'demarcheNumber': self.demarche_number,
            'after': None
        }
        dossiers = []
        has_next_page = True
        while has_next_page:
            raw_data = self._ds_call(query, variables)
            data = raw_data['demarche']['dossiers']
            for node in data['nodes']:
                # instructeur_id is guaranteed non-empty by the guard above,
                # so the 'or not' clause only matters if that guard is removed
                if node['groupeInstructeur']['id'] == self.instructeur_id or not self.instructeur_id:
                    dossiers.append(node)
            has_next_page = data['pageInfo']['hasNextPage']
            variables['after'] = data['pageInfo']['endCursor']

        def get_passage_en_construction(x):
            return parse_datetime(x['datePassageEnConstruction'])

        return {
            'dossiers': sorted(dossiers, key=get_passage_en_construction),
            'num_dossiers': len(dossiers)
        }

    @endpoint(
        methods=['get'], perm='can_access', name='dsproxy-get-dossiers',
        description=_('DS Proxy: get dossiers')
    )
    def dsproxy_get_dossiers(self, request):
        """Proxy endpoint returning the raw dossier list from DS."""
        return {
            'data': self._ds_get_dossiers()
        }

    @endpoint(
        methods=['get'], perm='can_access', name='fetch-dossiers',
        description=_('Fetch dossiers from DS and consolidate into local data')
    )
    def fetch_dossiers(self, request):
        """Fetch new dossiers from DS, returning only the newly imported ones."""
        dossiers = self._fetch_dossiers(request)
        return {
            'data': {
                'dossiers': dossiers,
                'num_dossiers': len(dossiers)
            }
        }

    def _fetch_dossiers(self, request):
        """Import new DS dossiers: download each attached CSV, verify its MD5
        checksum, and upsert its rows into Entreprise records.

        Already known dossiers only get their DS state refreshed and are not
        returned; the return value lists newly imported dossiers as JSON.
        """
        res = []
        for dossier in self._ds_get_dossiers()['dossiers']:
            id_dossier = dossier['id']
            try:
                local_dossier = self.dossiers.get(ds_id=id_dossier)
                # already fetched, update metadata
                local_dossier.ds_state = dossier['state']
                local_dossier.save()
                continue
            except DSDossier.DoesNotExist:
                pass
            with transaction.atomic():
                # grab file: the first champ carrying a file is the CSV
                ds_dossier = None
                for champ in dossier['champs']:
                    if 'file' in champ:
                        file_url = champ['file']['url']
                        filename = champ['file']['filename']
                        response = self.requests.get(file_url)
                        assert response.status_code == 200
                        ds_dossier = DSDossier.objects.create(
                            resource=self, ds_id=id_dossier, csv_filename=filename,
                            ds_state=dossier['state'], csv_checksum=champ['file']['checksum']
                        )
                        ds_dossier.csv_file.save(filename, BytesIO(response.content))
                        # make sure we hash the stored file from the start
                        ds_dossier.csv_file.seek(0)
                        filhash = hashlib.md5(ds_dossier.csv_file.read())
                        # DS checksums are base64-encoded MD5 digests
                        if base64.b64encode(filhash.digest()).decode() != ds_dossier.csv_checksum:
                            raise APIError('Bad checksum')
                        res.append(ds_dossier.to_json(request))
                        break
                # upsert into Entreprise, keyed on (resource, sequence)
                if ds_dossier:
                    for row in ds_dossier.get_rows():
                        sequence = row['sequence']
                        try:
                            entreprise = Entreprise.objects.filter(resource=self, sequence=sequence).get()
                            for attr, value in row.items():
                                setattr(entreprise, attr, value)
                        except Entreprise.DoesNotExist:
                            entreprise = Entreprise(resource=self, **row)
                        entreprise.save()
        return res

    @endpoint(
        methods=['get'], perm='can_access', name='get-dossiers',
        description=_('Get dossiers')
    )
    def get_dossiers(self, request):
        """Return all locally stored dossiers as JSON."""
        res = []
        for dossier in self.dossiers.all():
            res.append(dossier.to_json(request))
        return {
            'data': {
                'dossiers': res
            }
        }

    @endpoint(
        methods=['get'], perm='can_access', example_pattern='{dossier_pk}/',
        pattern='^(?P<dossier_pk>\w+)/$', name='get-dossier-file',
        parameters={
            'dossier_pk': {
                'description': _('Local dossier identifier'),
                'example_value': '2'
            }
        }, description=_('Get csv file from dossier')
    )
    def get_dossier_file(self, request, dossier_pk):
        """Stream the stored CSV file of one dossier as an attachment."""
        try:
            dossier = self.dossiers.get(pk=dossier_pk)
        except DSDossier.DoesNotExist:
            # typo fix: 'deos' -> 'does'
            raise APIError('Dossier does not exist')
        response = FileResponse(dossier.csv_file, content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="%s"' % dossier.csv_filename
        return response

    @endpoint(
        methods=['get'], perm='can_access', name='get-csv',
        description=_('Get consolidated data (csv file)')
    )
    def get_csv(self, request):
        """Stream every consolidated Entreprise row as one CSV attachment."""
        class Echo(object):
            """An object that implements just the write method of the file-like
            interface.
            """
            def write(self, value):
                """Write the value by returning it, instead of storing in a buffer."""
                return value

        def get_rows():
            # one CSV line per Entreprise, columns in COLUMNS_KEYNAMES order
            for entreprise in Entreprise.objects.filter(resource=self):
                row = [getattr(entreprise, attr) for attr in COLUMNS_KEYNAMES]
                yield row

        pseudo_buffer = Echo()
        writer = csv.writer(pseudo_buffer)
        # writer.writerow returns the formatted line thanks to Echo.write, so
        # the generator streams the file without buffering it in memory
        response = FileResponse(
            (writer.writerow(row) for row in get_rows()), content_type="text/csv"
        )
        response['Content-Disposition'] = 'attachment; filename="somefilename.csv"'
        return response

    @endpoint(
        methods=['get'], perm='can_access', name='get-data',
        description=_('Get data by sequence or siren')
    )
    def get_data(self, request, sequence=None, siren=None):
        """Look an Entreprise up by sequence (preferred) then by siren.

        Returns an empty 'data' dict when nothing matches; raises APIError
        when neither parameter is supplied.
        """
        def build_result(entreprise):
            return {
                'data': {attr: getattr(entreprise, attr) for attr in COLUMNS_KEYNAMES}
            }

        if sequence is None and siren is None:
            raise APIError('Need sequence or siren')
        if sequence:
            try:
                entreprise = Entreprise.objects.get(resource=self, sequence=sequence)
                return build_result(entreprise)
            except Entreprise.DoesNotExist:
                pass
        if siren:
            try:
                entreprise = Entreprise.objects.get(resource=self, siren=siren)
                return build_result(entreprise)
            except Entreprise.DoesNotExist:
                pass
        return {
            'data': {}
        }
class Entreprise(models.Model):
    # Consolidated FSN payment data for one company, parsed from the CSV
    # attached to a DS dossier. Field names mirror COLUMNS_KEYNAMES so CSV
    # rows can be applied directly via setattr / **row.
    class Meta:
        unique_together = (('resource', 'sequence'), ('resource', 'siren'),)
    resource = models.ForeignKey(FSNReunionConnector, on_delete=models.CASCADE)
    code_application = models.CharField(max_length=20)
    # unique per resource: primary lookup key for upserts
    sequence = models.CharField(max_length=16)
    periode = models.CharField(max_length=35)
    # unique per resource: secondary lookup key
    siren = models.CharField(max_length=11)
    nom1 = models.CharField(max_length=35, blank=True)
    nom2 = models.CharField(max_length=35, blank=True)
    nb_salarie = models.CharField(max_length=2)
    rue = models.CharField(max_length=35, blank=True)
    boite_postale = models.CharField(max_length=10, blank=True)
    code_postale = models.CharField(max_length=10, blank=True)
    ville = models.CharField(max_length=35, blank=True)
    code_pays = models.CharField(max_length=2)
    region = models.CharField(max_length=3)
    iban = models.CharField(max_length=34)
    montant = models.FloatField()
    devise = models.CharField(max_length=3)
    domaine_activite = models.CharField(max_length=4)
    societe = models.CharField(max_length=4)
    nb_dp = models.CharField(max_length=10)
    # dates kept as raw CSV strings, not DateFields
    date_dp = models.CharField(max_length=10)
    date_paiement = models.CharField(max_length=10)
    nom_demandeur = models.CharField(max_length=35, blank=True)
    prenom_demandeur = models.CharField(max_length=35, blank=True)
    qualite = models.CharField(max_length=35, blank=True)
    tel = models.CharField(max_length=30, blank=True)
    courriel = models.CharField(max_length=241, blank=True)
    last_update_datetime = models.DateTimeField(auto_now=True)
class DSDossier(models.Model):
    """Local copy of a DS dossier together with its attached CSV file."""

    class Meta:
        unique_together = (('resource', 'ds_id'),)

    resource = models.ForeignKey(
        FSNReunionConnector, on_delete=models.CASCADE, related_name='dossiers'
    )
    # identifier of the dossier on the DS platform
    ds_id = models.CharField(max_length=256)
    ds_state = models.CharField(max_length=256)
    csv_file = models.FileField(upload_to=csv_file_location)
    csv_filename = models.CharField(max_length=256)
    # base64-encoded MD5 digest, as provided by the DS API
    csv_checksum = models.CharField(max_length=256)
    last_update_datetime = models.DateTimeField(auto_now=True)

    def to_json(self, request):
        """Serialize the dossier, including an absolute download URL for the
        CSV file built on the connector's 'get-dossier-file' endpoint."""
        csv_file_url = request.build_absolute_uri(
            reverse(
                'generic-endpoint',
                kwargs={
                    'connector': self.resource.get_connector_slug(),
                    'slug': self.resource.slug,
                    'endpoint': 'get-dossier-file'
                }
            )
        ) + '/%s/' % self.id
        return {
            'id': str(self.pk),
            'ds_id': self.ds_id,
            'ds_state': self.ds_state,
            'csv_filename': self.csv_filename,
            'csv_file': csv_file_url,
            'csv_checksum': self.csv_checksum,
            'last_update_datetime': self.last_update_datetime
        }

    def get_content_without_bom(self):
        """Return the CSV content as text with any UTF-8 BOM stripped.

        'utf-8-sig' removes the BOM while decoding; the previous
        decode/encode/force_str round-trip was a redundant identity.
        """
        self.csv_file.seek(0)
        content = self.csv_file.read()
        return content.decode('utf-8-sig', 'ignore')

    def get_rows(self):
        """Parse the ';'-separated CSV into a list of dicts keyed by
        COLUMNS_KEYNAMES, mapping columns to keys positionally.

        Rows shorter than COLUMNS_KEYNAMES get None for the missing trailing
        columns; empty lines are skipped.
        """
        reader = csv.reader(self.get_content_without_bom().splitlines(), delimiter=';')
        rows = [[smart_text(cell) for cell in row] for row in reader if row]
        if not rows:
            return []

        def get_cell(row, index):
            try:
                return row[index]
            except IndexError:
                return None

        # COLUMNS_KEYNAMES entries are unique and non-empty, so the original
        # titles/indexes/caption dance reduced to a simple enumerate
        return [
            {key: get_cell(row, index) for index, key in enumerate(COLUMNS_KEYNAMES)}
            for row in rows
        ]