add batches endpoints
This commit is contained in:
parent
f53a09cc1d
commit
3c61c8876f
|
@ -7,6 +7,8 @@ Standards-Version: 3.9.1
|
|||
|
||||
Package: python-passerelle-reunion-fsn
|
||||
Architecture: all
|
||||
Depends: ${misc:Depends}, ${python:Depends}
|
||||
Depends: ${misc:Depends},
|
||||
${python:Depends},
|
||||
python-unicodecsv
|
||||
Description: Passerelle CR Reunion FSN
|
||||
|
||||
|
|
|
@ -0,0 +1,59 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.18 on 2020-04-24 15:37
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import jsonfield.fields
|
||||
import passerelle_reunion_fsn.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Add Batch/BatchFile models and WCS configuration fields.

    Auto-generated by Django 1.11; edit only comments by hand.
    """

    dependencies = [
        ('passerelle_reunion_fsn', '0003_csv_checksum'),
    ]

    operations = [
        # Batch: one export batch per connector and date (FK added below).
        migrations.CreateModel(
            name='Batch',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('batch_date', models.DateField()),
                ('last_update_datetime', models.DateTimeField(auto_now=True)),
            ],
            options={
                'ordering': ('batch_date',),
            },
        ),
        # BatchFile: CSV file generated for a batch; path built by
        # passerelle_reunion_fsn.models.batch_csv_file_location.
        migrations.CreateModel(
            name='BatchFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('csv_file', models.FileField(upload_to=passerelle_reunion_fsn.models.batch_csv_file_location)),
                ('csv_filename', models.CharField(max_length=256)),
                ('ready', models.BooleanField(default=False)),
                ('last_update_datetime', models.DateTimeField(auto_now=True)),
                ('batch', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='passerelle_reunion_fsn.Batch')),
            ],
        ),
        # New connector options used to query w.c.s.
        migrations.AddField(
            model_name='fsnreunionconnector',
            name='wcs_form_slug',
            field=models.CharField(blank=True, max_length=256, verbose_name='WCS form slug'),
        ),
        migrations.AddField(
            model_name='fsnreunionconnector',
            name='wcs_options',
            field=jsonfield.fields.JSONField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='batch',
            name='resource',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='batches', to='passerelle_reunion_fsn.FSNReunionConnector'),
        ),
        # A connector can only have one batch per date.
        migrations.AlterUniqueTogether(
            name='batch',
            unique_together=set([('resource', 'batch_date')]),
        ),
    ]
|
|
@ -17,21 +17,52 @@
|
|||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import csv
|
||||
from datetime import datetime
|
||||
import hashlib
|
||||
from io import BytesIO
|
||||
import os
|
||||
import os.path
|
||||
import tempfile
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.files import File
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.db import models, transaction
|
||||
from django.http import FileResponse
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from django.utils.encoding import force_str, smart_text
|
||||
from django.http import FileResponse, HttpResponse
|
||||
from django.utils import dateformat, six
|
||||
from django.utils.dateparse import parse_date, parse_datetime
|
||||
from django.utils.encoding import force_str, force_text, smart_text
|
||||
from django.utils.six.moves.urllib import parse as urlparse
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
import jsonfield
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.base.signature import sign_url
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
if six.PY3:
|
||||
import csv
|
||||
else:
|
||||
import unicodecsv as csv
|
||||
|
||||
|
||||
# JSON schema validating the POST body of the generate-batches endpoint.
GENERATE_BATCH_SCHEMA = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "title": "Generate batch",
    "description": "",
    "type": "object",
    "properties": {
        "batch_date": {
            # fixed typo: was "Bacth date"
            "description": "Batch date",
            "type": "string",
        },
        "force": {
            "description": "Force",
            "type": "boolean",
        }
    }
}
|
||||
|
||||
|
||||
COLUMNS_KEYNAMES = [
|
||||
'code_application',
|
||||
|
@ -67,6 +98,14 @@ def csv_file_location(instance, filename):
|
|||
return 'fsn_reunion/%s/%s' % (instance.ds_id, filename)
|
||||
|
||||
|
||||
def batch_csv_file_location(instance, filename):
    """Build the upload path of a BatchFile CSV, namespaced by connector
    id and batch date (Y-m-d)."""
    batch = instance.batch
    day = dateformat.format(batch.batch_date, 'Y-m-d')
    return 'fsn_reunion/%s/batch/%s/%s/%s' % (
        batch.resource.id, day, instance.id, filename
    )
||||
|
||||
|
||||
class FSNReunionConnector(BaseResource):
|
||||
|
||||
category = _('Business Process Connectors')
|
||||
|
@ -81,6 +120,14 @@ class FSNReunionConnector(BaseResource):
|
|||
max_length=256, blank=True, verbose_name=_('Instructeur identifier'),
|
||||
help_text=_('Region identifier for this case')
|
||||
)
|
||||
wcs_form_slug = models.CharField(max_length=256, blank=True, verbose_name=_('WCS form slug'))
|
||||
wcs_options = jsonfield.JSONField(null=True, blank=True)
|
||||
|
||||
# def daily(self):
|
||||
# self.logger.info('start generate batch')
|
||||
# for batch in self._generate_batches():
|
||||
# self.logger.info('created batch %s' % batch.batch_date)
|
||||
# self.logger.info('end generate batch')
|
||||
|
||||
def _ds_call(self, query, variables):
|
||||
headers = {
|
||||
|
@ -105,6 +152,58 @@ class FSNReunionConnector(BaseResource):
|
|||
|
||||
return json['data']
|
||||
|
||||
def _wcs_call(self, filters={}):
|
||||
if not getattr(settings, 'KNOWN_SERVICES', {}).get('wcs'):
|
||||
raise APIError('No wcs found')
|
||||
|
||||
wcs_service = list(settings.KNOWN_SERVICES['wcs'].values())[0]
|
||||
if self.wcs_options and 'instance' in self.wcs_options:
|
||||
wcs_service = self.wcs_options['instance']
|
||||
base_url = wcs_service['url']
|
||||
orig = wcs_service.get('orig')
|
||||
secret = wcs_service.get('secret')
|
||||
limit = 10
|
||||
params = {
|
||||
'orig': orig,
|
||||
'full': 'on',
|
||||
'limit': limit,
|
||||
'order_by': '-receipt_time'
|
||||
}
|
||||
params.update(self.wcs_options.get('filters', {}))
|
||||
params.update(filters)
|
||||
|
||||
offset = 0
|
||||
has_data = True
|
||||
while has_data:
|
||||
params['offset'] = offset
|
||||
query_string = urlparse.urlencode(params)
|
||||
api_url = sign_url(
|
||||
urlparse.urljoin(
|
||||
base_url,
|
||||
'api/forms/%s/list?%s' % (self.wcs_form_slug, query_string)
|
||||
),
|
||||
key=secret
|
||||
)
|
||||
response = self.requests.get(api_url)
|
||||
if response.status_code != 200:
|
||||
raise APIError('Error fetching data from wcs')
|
||||
data = response.json()
|
||||
if not data:
|
||||
has_data = False
|
||||
else:
|
||||
for form in data:
|
||||
yield form
|
||||
offset += limit
|
||||
|
||||
@endpoint(
|
||||
methods=['get'], perm='can_access', name='wcs-call',
|
||||
description=_('Call wcs debug')
|
||||
)
|
||||
def wcs_call(self, request):
|
||||
return {
|
||||
'data': [form for form in self._wcs_call()]
|
||||
}
|
||||
|
||||
@endpoint(
|
||||
methods=['get'], perm='can_access', name='dsproxy-get-instructeurs',
|
||||
description=_('DS Proxy: get instructeurs')
|
||||
|
@ -418,6 +517,216 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first:
|
|||
'data': {}
|
||||
}
|
||||
|
||||
@endpoint(
|
||||
methods=['get'], perm='can_access', name='get-batches',
|
||||
description=_('Get batches')
|
||||
)
|
||||
def get_batches(self, request):
|
||||
res = []
|
||||
for batch in self.batches.all():
|
||||
res.append(
|
||||
{
|
||||
'id': batch.pk,
|
||||
'text': dateformat.format(batch.batch_date, 'l d F Y'),
|
||||
'url': request.build_absolute_uri(batch.get_absolute_url())
|
||||
}
|
||||
)
|
||||
return {'data': res}
|
||||
|
||||
@endpoint(
|
||||
methods=['get'], perm='can_access', name='batchfile',
|
||||
pattern='^(?P<batchfile_pk>\w+)/$', example_pattern='{batchfile_pk}/',
|
||||
parameters={
|
||||
'batchfile_pk': {
|
||||
'description': _('Batch file identifier'),
|
||||
'example_value': '2'
|
||||
}
|
||||
}, description=_('Get batch file')
|
||||
|
||||
)
|
||||
def batchfile(self, request, batchfile_pk):
|
||||
try:
|
||||
batch_file = BatchFile.objects.get(pk=batchfile_pk)
|
||||
except BatchFile.DoesNotExist:
|
||||
raise APIError('Unkwon batch file identifier')
|
||||
response = HttpResponse(
|
||||
batch_file.csv_file.read(), content_type="text/csv"
|
||||
)
|
||||
response['Content-Disposition'] = 'attachment; filename="%s"' % batch_file.csv_filename
|
||||
return response
|
||||
|
||||
@endpoint(
|
||||
methods=['get'], perm='can_access', name='batch',
|
||||
description=_('Get batch'),
|
||||
pattern='^(?P<batch_pk>\w+)/$', example_pattern='{batch_pk}/',
|
||||
parameters={
|
||||
'batch_pk': {
|
||||
'description': _('Batch identifier'),
|
||||
'example_value': '2'
|
||||
}
|
||||
}
|
||||
)
|
||||
def batch(self, request, batch_pk):
|
||||
try:
|
||||
batch = self.batches.get(pk=batch_pk)
|
||||
except Batch.DoesNotExist:
|
||||
raise APIError('Batch does not exist')
|
||||
|
||||
batch_file = batch.files.filter(ready=True).order_by('-last_update_datetime').first()
|
||||
if not batch_file:
|
||||
raise APIError('No file available')
|
||||
|
||||
res = {}
|
||||
batch_files = []
|
||||
for i, batchfile in enumerate(batch.files.filter(ready=True).order_by('-last_update_datetime')):
|
||||
batchfile_json = batchfile.to_json()
|
||||
batchfile_json['url'] = request.build_absolute_uri(batchfile_json['url'])
|
||||
if i == 0:
|
||||
res['last_file'] = batchfile_json
|
||||
batch_files.append(batchfile_json)
|
||||
|
||||
res['batch_files'] = batch_files
|
||||
res.update(batch.to_json())
|
||||
res['url'] = request.build_absolute_uri(res['url'])
|
||||
return {
|
||||
'data': res
|
||||
}
|
||||
|
||||
@endpoint(
|
||||
perm='can_access', name='generate-batches',
|
||||
post={
|
||||
'description': _('Generate batches'),
|
||||
'request_body': {
|
||||
'schema': {
|
||||
'application/json': GENERATE_BATCH_SCHEMA
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
def generate_batches(self, request, post_data):
|
||||
batch_date = post_data.get('batch_date')
|
||||
force = post_data.get('force', False)
|
||||
|
||||
def abs_url(data):
|
||||
data['url'] = request.build_absolute_uri(data['url'])
|
||||
return data
|
||||
|
||||
return {
|
||||
'data': [abs_url(batch.to_json()) for batch in self._generate_batches(batch_date, force)]
|
||||
|
||||
}
|
||||
|
||||
    @transaction.atomic
    def _generate_batches(self, batch_date=None, force=False):
        """Collect favourable w.c.s. forms into per-date CSV batch files.

        :param batch_date: optional date string; restricts generation to
            that single batch date and implies force.
        :param force: regenerate files for batches that already exist.
        :returns: list of Batch objects a file was generated for.
        :raises APIError: on unparsable dates or missing mandatory fields.
        """
        target_date_obj = None
        if batch_date is not None:
            target_date_obj = parse_date(batch_date)
            if target_date_obj is None:
                raise APIError("Can't parse batch identifier")
            # asking for an explicit date implies regenerating it
            force = True

        # constant codes repeated in every CSV row
        # NOTE(review): code_app looks like a test value ('TST003') — confirm
        code_app = 'TST003'
        code_region = 'R04'
        code_pays = 'FR'
        devise = 'EUR'

        def get_data(fields, field_name):
            # export missing/None optional fields as empty strings
            return fields[field_name] or ''

        def write_row(writer, row):
            # coerce every cell to text for the py2 unicodecsv / py3 csv writer
            writer.writerow([force_text(i) for i in row])

        def add_target_batch(refs, target):
            # open a temp CSV file + writer for this batch and register it
            fd, temp_file_name = tempfile.mkstemp()
            os.close(fd)
            if six.PY3:
                f = open(temp_file_name, 'w', encoding='utf-8')
                refs[target] = (csv.writer(f, delimiter=';'), f, temp_file_name)
            else:
                f = open(temp_file_name, 'wb')
                refs[target] = (csv.writer(f, delimiter=';', encoding='utf-8'), f, temp_file_name)

        target_batches = {}  # Batch -> (csv writer, open file, temp path)
        now = datetime.now().date()
        for form in self._wcs_call():
            avis = form['workflow']['fields']['avis_favorable_defavorable']
            if not avis:
                raise APIError('Missing avis')
            if avis != 'Favorable':
                # only favourable decisions are exported
                continue
            date_raw = form['workflow']['fields']['date_avis_favorable']
            if not date_raw:
                raise APIError("Missing date")
            date_obj = parse_date(date_raw)
            if date_obj is None:
                raise APIError("Can't parse date")

            # FOR TEST because no accepted form yet
            # receipt_time = form['receipt_time']
            # if not receipt_time:
            #     raise APIError("Missing receipt_time")
            # date_obj = parse_datetime(receipt_time).date()
            # if date_obj is None:
            #     raise APIError("Can't parse date")

            # only fully elapsed days are batched
            if date_obj >= now:
                continue
            if target_date_obj and date_obj != target_date_obj:
                continue
            try:
                batch = self.batches.get(batch_date=date_obj)
                if force:
                    if batch not in target_batches:
                        add_target_batch(target_batches, batch)
                else:
                    # batch already generated and not forced: skip the form
                    continue
            except Batch.DoesNotExist:
                batch = Batch.objects.create(resource=self, batch_date=date_obj)
                add_target_batch(target_batches, batch)

            # write data
            form_fields = form['fields']
            workflow_fields = form['workflow']['fields']
            sequence = "%s-%s" % (code_region, form['display_id'])
            period = 'MARS-VOLET2'
            siren = form_fields['siren']
            nom1 = form_fields['nom_responsable'] + form_fields['prenom_responsable']
            nom2 = ''
            nb_salaries = form_fields['nb_salaries']
            rue = form_fields['numero_voie']
            bp = get_data(form_fields, 'boite_postale')
            cp = get_data(form_fields, 'code_postal')
            ville = get_data(form_fields, 'ville')
            iban = get_data(form_fields, 'iban')
            montant = get_data(workflow_fields, 'montant_aide_volet_2')
            nom_demandeur = form_fields['nom_demandeur']
            prenom_demandeur = form_fields['prenom_demandeur']
            qualite = ''
            tel = get_data(form_fields, 'telephone_demandeur')
            courriel = get_data(form_fields, 'courriel_demandeur')

            write_row(
                target_batches[batch][0],
                [
                    code_app, sequence, period, siren, nom1, nom2, nb_salaries, rue,
                    bp, cp, ville, code_pays, code_region, iban, montant, devise, nom_demandeur,
                    prenom_demandeur, qualite, tel, courriel
                ]
            )

        # create batch file objects from the temp files, then clean up
        for batch, (csv_writer, f, temp_file_name) in target_batches.items():
            f.close()
            with open(temp_file_name, 'rb') as tf:
                csv_filename = '%s.csv' % dateformat.format(batch.batch_date, 'Y-m-d')
                batch_file = BatchFile.objects.create(batch=batch, csv_filename=csv_filename)
                batch_file.csv_file.save(csv_filename, File(tf))
                # mark ready only once the file content is fully stored
                batch_file.ready = True
                batch_file.save()
            os.unlink(temp_file_name)

        return [batch for batch in target_batches.keys()]
|
||||
|
||||
|
||||
class Entreprise(models.Model):
|
||||
|
||||
|
@ -516,3 +825,62 @@ class DSDossier(models.Model):
|
|||
return None
|
||||
|
||||
return [{caption: get_cell(row, index) for caption, index in zip(caption, indexes)} for row in rows]
|
||||
|
||||
|
||||
class Batch(models.Model):
    """One export batch of a connector, identified by its date."""

    class Meta:
        # a connector has at most one batch per date
        unique_together = (('resource', 'batch_date'),)
        ordering = ('batch_date',)

    # connector owning this batch
    resource = models.ForeignKey(
        FSNReunionConnector, on_delete=models.CASCADE, related_name='batches'
    )
    # day the batch covers
    batch_date = models.DateField()
    last_update_datetime = models.DateTimeField(auto_now=True)

    def to_json(self):
        """Return a serializable dict describing the batch (URL is relative)."""
        return {
            'batch_date': self.batch_date,
            'last_update_datetime': self.last_update_datetime,
            'url': self.get_absolute_url()
        }

    def get_absolute_url(self):
        """Relative URL of this batch on the connector's 'batch' endpoint."""
        return reverse(
            'generic-endpoint',
            kwargs={
                'connector': self.resource.get_connector_slug(),
                'slug': self.resource.slug,
                'endpoint': 'batch'
            }
        ) + '/%s/' % self.pk
|
||||
|
||||
|
||||
class BatchFile(models.Model):
    """A generated CSV file attached to a Batch."""

    batch = models.ForeignKey(
        Batch, on_delete=models.CASCADE, related_name='files'
    )
    # stored file; path built by batch_csv_file_location
    csv_file = models.FileField(upload_to=batch_csv_file_location)
    # filename exposed to clients on download
    csv_filename = models.CharField(max_length=256)
    # True once the file content has been fully written
    ready = models.BooleanField(default=False)
    last_update_datetime = models.DateTimeField(auto_now=True)

    def to_json(self):
        """Return a serializable dict describing the file (URL is relative)."""
        return {
            'csv_filename': self.csv_filename,
            'ready': self.ready,
            'last_update_datetime': self.last_update_datetime,
            'url': self.get_absolute_url()
        }

    def get_absolute_url(self):
        """Relative URL of this file on the connector's 'batchfile' endpoint."""
        return reverse(
            'generic-endpoint',
            kwargs={
                'connector': self.batch.resource.get_connector_slug(),
                'slug': self.batch.resource.slug,
                'endpoint': 'batchfile'
            }
        ) + '/%s/' % self.pk
|
||||
|
|
Reference in New Issue