# -*- coding: utf-8 -*-
# passerelle-reunion-fsn
# Copyright (C) 2020 Entr'ouvert
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import base64
from datetime import datetime
import hashlib
from io import BytesIO
import os
import os.path
import tempfile
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.core.files import File
from django.core.urlresolvers import reverse
from django.db import models, transaction
from django.http import FileResponse, HttpResponse
from django.utils import dateformat, six
from django.utils.dateparse import parse_date, parse_datetime
from django.utils.encoding import force_str, force_text, smart_text
from django.utils.six.moves.urllib import parse as urlparse
from django.utils.translation import ugettext_lazy as _
from passerelle.base.models import BaseResource
from passerelle.base.signature import sign_url
from passerelle.utils.api import endpoint
from passerelle.utils.jsonresponse import APIError
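# Python 2's stdlib csv module does not handle unicode; unicodecsv is an
# API-compatible replacement used there.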
if six.PY3:
import csv
else:
import unicodecsv as csv
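# JSON schema validating the POST body of the generate-batches endpoint; an
# accepted payload looks like {"batch_date": "2020-04-15", "force": true},
# where batch_date must be an ISO 8601 date.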
GENERATE_BATCH_SCHEMA = {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Generate batch",
"description": "",
"type": "object",
"properties": {
"batch_date": {
"description": "Bacth date",
"type": "string",
},
"force": {
"description": "Force",
"type": "boolean",
}
}
}
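# Column names of the consolidated FSN CSV data, in file order.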
COLUMNS_KEYNAMES = [
'code_application',
'sequence',
'periode',
'siren',
'nom1',
'nom2',
'nb_salarie',
'rue',
'boite_postale',
'code_postale',
'ville',
'code_pays',
'region',
'iban',
'montant',
'devise',
'domaine_activite',
'societe',
'nb_dp',
'date_dp',
'date_paiement',
'nom_demandeur',
'prenom_demandeur',
'qualite',
'tel',
'code_ape',
'courriel',
]
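# Upload-path helpers for the FileFields defined on the models below.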
def csv_file_location(instance, filename):
return 'fsn_reunion/%s/%s' % (instance.ds_id, filename)
def batch_csv_file_location(instance, filename):
return 'fsn_reunion/%s/batch/%s/%s/%s' % (
instance.batch.resource.id,
dateformat.format(instance.batch.batch_date, 'Y-m-d'),
instance.id, filename
)
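# Connector consolidating FSN dossiers for La Réunion: dossiers and their CSV
# attachments are pulled from the demarches-simplifiees.fr GraphQL API, and
# daily payment batches are built from w.c.s. form data.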
class FSNReunionConnector(BaseResource):
category = _('Business Process Connectors')
class Meta:
verbose_name = _('FSN Reunion')
api_url = models.URLField(max_length=400, verbose_name=_('DS API URL'))
token = models.CharField(max_length=256, verbose_name=_('DS token'))
demarche_number = models.IntegerField(verbose_name=_('Demarche number'))
instructeur_id = models.CharField(
max_length=256, blank=True, verbose_name=_('Instructeur identifier'),
help_text=_('Region identifier for this case')
)
wcs_form_slug = models.CharField(max_length=256, blank=True, verbose_name=_('WCS form slug'))
wcs_options = JSONField(null=True, blank=True)
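    # Jobs below are run periodically by passerelle's cron machinery; daily
    # batch generation is currently disabled.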
def hourly(self):
self.logger.info('start fetch dossier')
dossiers = self._fetch_dossiers()
self.logger.info('num new dossiers fetched: %s' % len(dossiers))
# def daily(self):
# self.logger.info('start generate batch')
# for batch in self._generate_batches():
# self.logger.info('created batch %s' % batch.batch_date)
# self.logger.info('end generate batch')
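    # Post a GraphQL query with its variables to the DS API, authenticated by
    # bearer token; transport and GraphQL-level errors surface as APIError.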
def _ds_call(self, query, variables):
headers = {
'Authorization': 'Bearer token=' + self.token
}
data = {
'query': query,
'variables': variables
}
response = self.requests.post(self.api_url, headers=headers, json=data)
if response.status_code != 200:
            raise APIError('An error occurred, status code: %s' % response.status_code)
json = response.json()
if 'errors' in json:
            msg = 'Unknown error'
for error in json['errors']:
if 'message' in error:
msg = error['message']
break
            raise APIError('An error occurred: %s' % msg)
return json['data']
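    # Yield submitted w.c.s. forms for the configured form slug, following the
    # signed JSON API page by page until an empty page is returned.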
    def _wcs_call(self, filters=None):
if not getattr(settings, 'KNOWN_SERVICES', {}).get('wcs'):
raise APIError('No wcs found')
wcs_service = list(settings.KNOWN_SERVICES['wcs'].values())[0]
if self.wcs_options and 'instance' in self.wcs_options:
wcs_service = self.wcs_options['instance']
base_url = wcs_service['url']
orig = wcs_service.get('orig')
secret = wcs_service.get('secret')
limit = 10
params = {
'orig': orig,
'full': 'on',
'limit': limit,
'order_by': '-receipt_time'
}
        params.update((self.wcs_options or {}).get('filters', {}))
        params.update(filters or {})
offset = 0
has_data = True
while has_data:
params['offset'] = offset
query_string = urlparse.urlencode(params)
api_url = sign_url(
urlparse.urljoin(
base_url,
'api/forms/%s/list?%s' % (self.wcs_form_slug, query_string)
),
key=secret
)
response = self.requests.get(api_url)
if response.status_code != 200:
raise APIError('Error fetching data from wcs')
data = response.json()
if not data:
has_data = False
else:
for form in data:
yield form
offset += limit
@endpoint(
methods=['get'], perm='can_access', name='wcs-call',
description=_('Call wcs debug')
)
def wcs_call(self, request):
return {
'data': [form for form in self._wcs_call()]
}
@endpoint(
methods=['get'], perm='can_access', name='dsproxy-get-instructeurs',
description=_('DS Proxy: get instructeurs')
)
def dsproxy_get_instructeurs(self, request):
query = '''
query getInstructeurs($demarcheNumber: Int!){
demarche(number: $demarcheNumber) {
groupeInstructeurs {
id
label
}
}
}
'''
variables = {
'demarcheNumber': self.demarche_number
}
return {
'data': self._ds_call(query, variables)
}
@endpoint(
methods=['get'], perm='can_access', name='dsproxy-get-liste-champs',
description=_('DS Proxy: get fields identifiers'),
)
def dsproxy_get_liste_champs(self, request):
query = '''
query getChampDescriptors($demarcheNumber: Int!) {
demarche(number: $demarcheNumber) {
champDescriptors {
id
label
type
required
}
}
}
'''
variables = {
'demarcheNumber': self.demarche_number
}
return {
'data': self._ds_call(query, variables)
}
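    # Walk the paginated dossiers connection (pageInfo/endCursor), keep the
    # dossiers assigned to the configured groupe instructeur, and return them
    # sorted by their date of passage en construction.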
def _ds_get_dossiers(self):
if not self.instructeur_id:
            raise APIError('Need an instructeur identifier')
query = '''
query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: Int, $after: String) {
demarche(number: $demarcheNumber) {
number
dossiers(first: $first, after: $after, createdSince: $createdSince) {
pageInfo {
hasNextPage
endCursor
}
nodes {
id
number
state
datePassageEnConstruction
datePassageEnInstruction
dateTraitement
dateDerniereModification
usager {
email
}
groupeInstructeur {
id
}
demandeur {
... on PersonneMorale {
siret
entreprise {
siren
raisonSociale
}
}
}
messages {
email
body
createdAt
attachment {
url
filename
contentType
byteSize
checksum
}
}
champs {
id
label
... on TextChamp {
value
}
... on DateChamp {
value
}
... on PieceJustificativeChamp {
file {
url
filename
contentType
byteSize
checksum
}
}
}
}
}
}
}
'''
variables = {
'demarcheNumber': self.demarche_number,
'after': None
}
dossiers = []
has_next_page = True
while has_next_page:
raw_data = self._ds_call(query, variables)
data = raw_data['demarche']['dossiers']
for node in data['nodes']:
if node['groupeInstructeur']['id'] == self.instructeur_id or not self.instructeur_id:
dossiers.append(node)
has_next_page = data['pageInfo']['hasNextPage']
variables['after'] = data['pageInfo']['endCursor']
def get_passage_en_construction(x):
return parse_datetime(x['datePassageEnConstruction'])
return {
'dossiers': sorted(dossiers, key=get_passage_en_construction),
'num_dossiers': len(dossiers)
}
@endpoint(
methods=['get'], perm='can_access', name='dsproxy-get-dossiers',
description=_('DS Proxy: get dossiers')
)
def dsproxy_get_dossiers(self, request):
return {
'data': self._ds_get_dossiers()
}
@endpoint(
methods=['post'], perm='can_access', name='fetch-dossiers',
description=_('Fetch dossiers from DS and consolidate into local data')
)
def fetch_dossiers(self, request):
dossiers = self._fetch_dossiers(request)
return {
'data': {
'dossiers': dossiers,
'num_dossiers': len(dossiers)
}
}
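    # Consolidation: for each new dossier, download its CSV attachment, check
    # the MD5 checksum announced by DS, then upsert every CSV row into the
    # Entreprise table, keyed on the sequence column.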
def _fetch_dossiers(self, request=None):
res = []
for dossier in self._ds_get_dossiers()['dossiers']:
id_dossier = dossier['id']
try:
local_dossier = self.dossiers.get(ds_id=id_dossier)
# already fetched, update metadata
local_dossier.ds_state = dossier['state']
local_dossier.save()
continue
except DSDossier.DoesNotExist:
pass
with transaction.atomic():
# grab file
ds_dossier = None
for champ in dossier['champs']:
if 'file' in champ:
file_url = champ['file']['url']
filename = champ['file']['filename']
response = self.requests.get(file_url)
                        if response.status_code != 200:
                            raise APIError('Failed to download file, status code: %s' % response.status_code)
ds_dossier = DSDossier.objects.create(
resource=self, ds_id=id_dossier, csv_filename=filename,
ds_state=dossier['state'], csv_checksum=champ['file']['checksum']
)
ds_dossier.csv_file.save(filename, BytesIO(response.content))
                        ds_dossier.csv_file.seek(0)  # rewind before hashing the stored copy
                        file_hash = hashlib.md5(ds_dossier.csv_file.read())
                        if base64.b64encode(file_hash.digest()).decode() != ds_dossier.csv_checksum:
raise APIError('Bad checksum')
res.append(ds_dossier.to_json(request))
break
# upsert into Entreprise
if ds_dossier:
for row in ds_dossier.get_rows():
sequence = row['sequence']
try:
entreprise = Entreprise.objects.filter(resource=self, sequence=sequence).get()
for attr, value in row.items():
setattr(entreprise, attr, value)
except Entreprise.DoesNotExist:
entreprise = Entreprise(resource=self, **row)
entreprise.save()
return res
@endpoint(
methods=['get'], perm='can_access', name='get-dossiers',
description=_('Get dossiers')
)
def get_dossiers(self, request):
res = []
for dossier in self.dossiers.all():
res.append(dossier.to_json(request))
return {
'data': {
'dossiers': res
}
}
@endpoint(
methods=['get'], perm='can_access', example_pattern='{dossier_pk}/',
        pattern=r'^(?P<dossier_pk>\w+)/$', name='get-dossier-file',
parameters={
'dossier_pk': {
'description': _('Local dossier identifier'),
'example_value': '2'
}
}, description=_('Get csv file from dossier')
)
def get_dossier_file(self, request, dossier_pk):
try:
dossier = self.dossiers.get(pk=dossier_pk)
except DSDossier.DoesNotExist:
            raise APIError('Dossier does not exist')
response = FileResponse(dossier.csv_file, content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="%s"' % dossier.csv_filename
return response
@endpoint(
methods=['get'], perm='can_access', name='get-csv',
description=_('Get consolidated data (csv file)')
)
def get_csv(self, request):
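        # Stream the CSV row by row instead of materialising it in memory,
        # using the pseudo-buffer pattern from the Django documentation.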
class Echo(object):
"""An object that implements just the write method of the file-like
interface.
"""
def write(self, value):
"""Write the value by returning it, instead of storing in a buffer."""
return value
def get_rows():
for entreprise in Entreprise.objects.filter(resource=self):
row = [force_str(getattr(entreprise, attr)) for attr in COLUMNS_KEYNAMES]
yield row
pseudo_buffer = Echo()
writer = csv.writer(pseudo_buffer)
response = FileResponse(
(writer.writerow(row) for row in get_rows()), content_type="text/csv"
)
response['Content-Disposition'] = 'attachment; filename="somefilename.csv"'
return response
@endpoint(
methods=['get'], perm='can_access', name='get-data',
description=_('Get data by sequence or siren')
)
def get_data(self, request, sequence=None, siren=None):
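        # Lookup is by exact sequence first, then by siren, where the row with
        # the highest sequence wins; no match returns an empty data dict.
        # e.g. GET .../get-data?sequence=R04-123 or .../get-data?siren=123456789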
def build_result(entreprise):
return {
'data': {attr: getattr(entreprise, attr) for attr in COLUMNS_KEYNAMES}
}
if sequence is None and siren is None:
raise APIError('Need sequence or siren')
if sequence:
try:
entreprise = Entreprise.objects.get(resource=self, sequence=sequence)
return build_result(entreprise)
except Entreprise.DoesNotExist:
pass
if siren:
entreprise = Entreprise.objects.filter(resource=self, siren=siren).order_by('sequence').last()
if entreprise:
return build_result(entreprise)
return {
'data': {}
}
@endpoint(
methods=['get'], perm='can_access', name='get-batches',
description=_('Get batches')
)
def get_batches(self, request):
res = []
for batch in self.batches.all():
res.append(
{
'id': batch.pk,
'text': dateformat.format(batch.batch_date, 'l d F Y'),
'url': request.build_absolute_uri(batch.get_absolute_url())
}
)
return {'data': res}
@endpoint(
methods=['get'], perm='can_access', name='batchfile',
        pattern=r'^(?P<batchfile_pk>\w+)/$', example_pattern='{batchfile_pk}/',
parameters={
'batchfile_pk': {
'description': _('Batch file identifier'),
'example_value': '2'
}
}, description=_('Get batch file')
)
def batchfile(self, request, batchfile_pk):
try:
batch_file = BatchFile.objects.get(pk=batchfile_pk)
except BatchFile.DoesNotExist:
            raise APIError('Unknown batch file identifier')
response = HttpResponse(
batch_file.csv_file.read(), content_type="text/csv"
)
response['Content-Disposition'] = 'attachment; filename="%s"' % batch_file.csv_filename
return response
@endpoint(
methods=['get'], perm='can_access', name='batch',
description=_('Get batch'),
pattern='^(?P<batch_pk>\w+)/$', example_pattern='{batch_pk}/',
parameters={
'batch_pk': {
'description': _('Batch identifier'),
'example_value': '2'
}
}
)
def batch(self, request, batch_pk):
try:
batch = self.batches.get(pk=batch_pk)
except Batch.DoesNotExist:
raise APIError('Batch does not exist')
batch_file = batch.files.filter(ready=True).order_by('-last_update_datetime').first()
if not batch_file:
raise APIError('No file available')
res = {}
batch_files = []
for i, batchfile in enumerate(batch.files.filter(ready=True).order_by('-last_update_datetime')):
batchfile_json = batchfile.to_json()
batchfile_json['url'] = request.build_absolute_uri(batchfile_json['url'])
if i == 0:
res['last_file'] = batchfile_json
batch_files.append(batchfile_json)
res['batch_files'] = batch_files
res.update(batch.to_json())
res['url'] = request.build_absolute_uri(res['url'])
return {
'data': res
}
@endpoint(
perm='can_access', name='generate-batches',
post={
'description': _('Generate batches'),
'request_body': {
'schema': {
'application/json': GENERATE_BATCH_SCHEMA
}
}
}
)
def generate_batches(self, request, post_data):
batch_date = post_data.get('batch_date')
force = post_data.get('force', False)
def abs_url(data):
data['url'] = request.build_absolute_uri(data['url'])
return data
return {
'data': [abs_url(batch.to_json()) for batch in self._generate_batches(batch_date, force)]
}
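    # Build one Batch per favorable-decision date found in w.c.s.: rows are
    # accumulated in per-date temporary CSV files (semicolon-separated), then
    # attached to their Batch as a new ready BatchFile. An existing batch is
    # only regenerated when force is set, which an explicit batch_date implies.
    # Application, region, country and currency codes are hardcoded below.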
@transaction.atomic
def _generate_batches(self, batch_date=None, force=False):
target_date_obj = None
if batch_date is not None:
target_date_obj = parse_date(batch_date)
if target_date_obj is None:
raise APIError("Can't parse batch identifier")
force = True
code_app = 'TST003'
code_region = 'R04'
code_pays = 'FR'
devise = 'EUR'
def get_data(fields, field_name):
return fields[field_name] or ''
def write_row(writer, row):
writer.writerow([force_text(i) for i in row])
def add_target_batch(refs, target):
fd, temp_file_name = tempfile.mkstemp()
os.close(fd)
if six.PY3:
f = open(temp_file_name, 'w', encoding='utf-8')
refs[target] = (csv.writer(f, delimiter=';'), f, temp_file_name)
else:
f = open(temp_file_name, 'wb')
refs[target] = (csv.writer(f, delimiter=';', encoding='utf-8'), f, temp_file_name)
target_batches = {}
now = datetime.now().date()
for form in self._wcs_call():
avis = form['workflow']['fields']['avis_favorable_defavorable']
            if avis != 'Favorable':
continue
date_raw = form['workflow']['fields']['date_avis_favorable']
if not date_raw:
raise APIError("Missing date")
date_obj = parse_date(date_raw)
if date_obj is None:
raise APIError("Can't parse date")
# FOR TEST because no accepted form yet
# receipt_time = form['receipt_time']
# if not receipt_time:
# raise APIError("Missing receipt_time")
# date_obj = parse_datetime(receipt_time).date()
# if date_obj is None:
# raise APIError("Can't parse date")
if date_obj >= now:
continue
if target_date_obj and date_obj != target_date_obj:
continue
try:
batch = self.batches.get(batch_date=date_obj)
if force:
if batch not in target_batches:
add_target_batch(target_batches, batch)
else:
continue
except Batch.DoesNotExist:
batch = Batch.objects.create(resource=self, batch_date=date_obj)
add_target_batch(target_batches, batch)
# write data
form_fields = form['fields']
workflow_fields = form['workflow']['fields']
sequence = "%s-%s" % (code_region, form['display_id'])
period = 'MARS-VOLET2'
siren = form_fields['siren']
nom1 = form_fields['nom_responsable'] + form_fields['prenom_responsable']
nom2 = ''
nb_salaries = form_fields['nb_salaries']
rue = form_fields['numero_voie']
bp = get_data(form_fields, 'boite_postale')
cp = get_data(form_fields, 'code_postal')
ville = get_data(form_fields, 'ville')
iban = get_data(form_fields, 'iban')
montant = get_data(workflow_fields, 'montant_aide_volet_2')
nom_demandeur = form_fields['nom_demandeur']
prenom_demandeur = form_fields['prenom_demandeur']
qualite = ''
tel = get_data(form_fields, 'telephone_demandeur')
courriel = get_data(form_fields, 'courriel_demandeur')
write_row(
target_batches[batch][0],
[
code_app, sequence, period, siren, nom1, nom2, nb_salaries, rue,
bp, cp, ville, code_pays, code_region, iban, montant, devise, nom_demandeur,
prenom_demandeur, qualite, tel, courriel
]
)
# create batch file objects
for batch, (csv_writer, f, temp_file_name) in target_batches.items():
f.close()
with open(temp_file_name, 'rb') as tf:
csv_filename = '%s.csv' % dateformat.format(batch.batch_date, 'Y-m-d')
batch_file = BatchFile.objects.create(batch=batch, csv_filename=csv_filename)
batch_file.csv_file.save(csv_filename, File(tf))
batch_file.ready = True
batch_file.save()
os.unlink(temp_file_name)
        return list(target_batches)
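# Consolidated beneficiary record; fields mirror COLUMNS_KEYNAMES, one row per
# (resource, sequence).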
class Entreprise(models.Model):
class Meta:
unique_together = (('resource', 'sequence'), ('resource', 'periode', 'siren'),)
resource = models.ForeignKey(FSNReunionConnector, on_delete=models.CASCADE)
code_application = models.CharField(max_length=20)
sequence = models.CharField(max_length=16)
periode = models.CharField(max_length=35)
siren = models.CharField(max_length=11)
nom1 = models.CharField(max_length=35, blank=True)
nom2 = models.CharField(max_length=35, blank=True)
nb_salarie = models.CharField(max_length=2)
rue = models.CharField(max_length=35, blank=True)
boite_postale = models.CharField(max_length=10, blank=True)
code_postale = models.CharField(max_length=10, blank=True)
ville = models.CharField(max_length=35, blank=True)
code_pays = models.CharField(max_length=2)
region = models.CharField(max_length=3)
iban = models.CharField(max_length=34)
montant = models.FloatField()
devise = models.CharField(max_length=3)
domaine_activite = models.CharField(max_length=4)
societe = models.CharField(max_length=4)
nb_dp = models.CharField(max_length=10)
date_dp = models.CharField(max_length=10)
date_paiement = models.CharField(max_length=10)
nom_demandeur = models.CharField(max_length=35, blank=True)
prenom_demandeur = models.CharField(max_length=35, blank=True)
qualite = models.CharField(max_length=35, blank=True)
tel = models.CharField(max_length=30, blank=True)
code_ape = models.CharField(max_length=10, blank=True)
courriel = models.CharField(max_length=241, blank=True)
last_update_datetime = models.DateTimeField(auto_now=True)
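# Local copy of a demarches-simplifiees.fr dossier together with its fetched
# CSV attachment.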
class DSDossier(models.Model):
class Meta:
unique_together = (('resource', 'ds_id'),)
resource = models.ForeignKey(
FSNReunionConnector, on_delete=models.CASCADE, related_name='dossiers'
)
ds_id = models.CharField(max_length=256)
ds_state = models.CharField(max_length=256)
csv_file = models.FileField(upload_to=csv_file_location)
csv_filename = models.CharField(max_length=256)
csv_checksum = models.CharField(max_length=256)
last_update_datetime = models.DateTimeField(auto_now=True)
def to_json(self, request=None):
csv_file_url = reverse(
'generic-endpoint',
kwargs={
'connector': self.resource.get_connector_slug(),
'slug': self.resource.slug,
'endpoint': 'get-dossier-file'
}
) + '/%s/' % self.id
if request is not None:
csv_file_url = request.build_absolute_uri(csv_file_url)
return {
'id': str(self.pk),
'ds_id': self.ds_id,
'ds_state': self.ds_state,
'csv_filename': self.csv_filename,
'csv_file': csv_file_url,
'csv_checksum': self.csv_checksum,
'last_update_datetime': self.last_update_datetime
}
def get_content_without_bom(self):
self.csv_file.seek(0)
content = self.csv_file.read()
return force_str(content.decode('utf-8-sig', 'ignore').encode('utf-8'))
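    # Parse the stored CSV (semicolon-separated) into dicts keyed on
    # COLUMNS_KEYNAMES; files with only 26 columns are handled as lacking the
    # code_ape column.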
def get_rows(self):
content = self.get_content_without_bom()
reader = csv.reader(content.splitlines(), delimiter=';')
rows = list(reader)
if not rows:
return []
rows = [[smart_text(x) for x in y] for y in rows if y]
titles = [t.strip() for t in COLUMNS_KEYNAMES]
if len(rows[0]) == 26: # CSV file without "code_ape"
titles.pop(titles.index('code_ape'))
indexes = [titles.index(t) for t in titles if t]
caption = [titles[i] for i in indexes]
def get_cell(row, index):
try:
return row[index]
except IndexError:
return None
        return [{c: get_cell(row, i) for c, i in zip(caption, indexes)} for row in rows]
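# One export day: a Batch groups the BatchFiles generated for a given
# batch_date.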
class Batch(models.Model):
class Meta:
unique_together = (('resource', 'batch_date'),)
ordering = ('batch_date',)
resource = models.ForeignKey(
FSNReunionConnector, on_delete=models.CASCADE, related_name='batches'
)
batch_date = models.DateField()
last_update_datetime = models.DateTimeField(auto_now=True)
def to_json(self):
return {
'batch_date': self.batch_date,
'last_update_datetime': self.last_update_datetime,
'url': self.get_absolute_url()
}
def get_absolute_url(self):
return reverse(
'generic-endpoint',
kwargs={
'connector': self.resource.get_connector_slug(),
'slug': self.resource.slug,
'endpoint': 'batch'
}
) + '/%s/' % self.pk
class BatchFile(models.Model):
batch = models.ForeignKey(
Batch, on_delete=models.CASCADE, related_name='files'
)
csv_file = models.FileField(upload_to=batch_csv_file_location)
csv_filename = models.CharField(max_length=256)
ready = models.BooleanField(default=False)
last_update_datetime = models.DateTimeField(auto_now=True)
def to_json(self):
return {
'csv_filename': self.csv_filename,
'ready': self.ready,
'last_update_datetime': self.last_update_datetime,
'url': self.get_absolute_url()
}
def get_absolute_url(self):
return reverse(
'generic-endpoint',
kwargs={
'connector': self.batch.resource.get_connector_slug(),
'slug': self.batch.resource.slug,
'endpoint': 'batchfile'
}
) + '/%s/' % self.pk