Initial import of source code (new models: AtrealOpenads, ForwardFile)
This commit is contained in:
commit
0d4b51e9ba
|
@ -0,0 +1,52 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.15 on 2019-04-30 12:07
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import jsonfield.fields
|
||||
import passerelle.apps.atreal_openads.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Initial schema: create the AtrealOpenads connector and its ForwardFile model.

    Auto-generated by Django 1.11 (Python 2); the byte-string defaults
    and literal-dict JSONField default mirror the model state at the
    time of generation and must not be edited retroactively.
    """

    initial = True

    dependencies = [
        ('base', '0012_job'),
    ]

    operations = [
        migrations.CreateModel(
            name='AtrealOpenads',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50, verbose_name='Title')),
                ('description', models.TextField(verbose_name='Description')),
                ('slug', models.SlugField(unique=True, verbose_name='Identifier')),
                ('myjobs', jsonfield.fields.JSONField(default={})),
                ('collectivite', models.CharField(blank=True, default=b'', help_text='ex: Marseille, ou ex: 3', max_length=255, verbose_name='Collectivity (identifier)')),
                ('openADS_API_key', models.CharField(default=b'', help_text='ex: ah9pGbKKHv5ToF3cPQuV', max_length=255, verbose_name='openADS API key (secret)')),
                ('openADS_API_url', models.URLField(default=b'', help_text='ex: https://openads.your_domain.net/api/', max_length=255, verbose_name='openADS API URL')),
                ('users', models.ManyToManyField(blank=True, to='base.ApiUser')),
            ],
            options={
                'verbose_name': 'openADS',
            },
        ),
        migrations.CreateModel(
            name='ForwardFile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('numero_demande', models.CharField(max_length=20)),
                ('numero_dossier', models.CharField(max_length=20)),
                ('type_fichier', models.CharField(max_length=10)),
                ('file_hash', models.CharField(blank=True, default=b'', max_length=100)),
                ('orig_filename', models.CharField(blank=True, default=b'', max_length=100)),
                ('content_type', models.CharField(blank=True, default=b'', max_length=100)),
                ('upload_file', models.FileField(null=True, upload_to=passerelle.apps.atreal_openads.models.get_upload_path)),
                ('upload_status', models.CharField(blank=True, default=b'', max_length=10)),
                ('upload_msg', models.CharField(blank=True, default=b'', max_length=255)),
                ('last_update_datetime', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
|
|
@ -0,0 +1,669 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# passerelle - uniform access to multiple data sources and services
|
||||
# Copyright (C) 2018 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import json
|
||||
import base64
|
||||
import urlparse
|
||||
|
||||
from django.db import models
|
||||
from django.http import Http404, HttpResponse, FileResponse
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
import jsonfield
|
||||
import os
|
||||
|
||||
import re
|
||||
from HTMLParser import HTMLParser
|
||||
|
||||
import magic
|
||||
import hashlib
|
||||
from django.core.files import File
|
||||
|
||||
# TODO remove (only for debuging/development)
|
||||
import time
|
||||
import copy
|
||||
|
||||
|
||||
class MLStripper(HTMLParser):
    """HTML parser that accumulates only text nodes, used to strip tags."""

    def __init__(self):
        # BUG FIX: initialize the base parser first. On Python 3,
        # HTMLParser.__init__ sets required state (convert_charrefs)
        # that reset() alone does not; on Python 2 this is a harmless
        # extra reset().
        HTMLParser.__init__(self)
        self.reset()
        # collected text fragments, joined by get_data()
        self.fed = []

    def handle_data(self, d):
        # Called by the base parser for every text node encountered.
        self.fed.append(d)

    def get_data(self):
        """Return the concatenated text content seen so far."""
        return ''.join(self.fed)
|
||||
|
||||
|
||||
def strip_tags(html):
    """Return *html* with all markup removed, keeping only the text."""
    stripper = MLStripper()
    stripper.feed(html)
    return stripper.get_data()
|
||||
|
||||
|
||||
def clean_spaces(text):
    """Collapse line breaks, tabs and repeated spaces into single spaces.

    Both real control characters and their backslash-escaped textual
    forms (as found in serialized payloads) are treated as spaces; the
    result is also stripped of leading/trailing whitespace.
    """
    for token in ('\n', '\r', '\t', '\\n', '\\r', '\\t'):
        text = text.replace(token, ' ')
    return re.sub(r' +', ' ', text).strip()
|
||||
|
||||
|
||||
# Sample 'dossier' creation payload kept for development/testing.
# NOTE(review): this constant appears unused within this module -- confirm
# before removing.
dossier_payload = {
    "collectivite": 3,
    "terrain": {
        "numero_voie": 15,
        "nom_voie": "boulevard Layglize",
        "lieu_dit": "Li Corne",
        "code_postal": 13014,
        "localite": "Marseille",
        "references_cadastrales": [
            {"prefixe": "696", "section": "M", "numero": "0012"},
        ],
    },
    "demandeurs": [
        {
            "type_personne": "particulier",
            "typologie": "petitionnaire",
            "nom": "Khaleesi",
            "prenom": "Daenerys",
            "adresse": {
                "numero_voie": 1,
                "nom_voie": "boulevard Targaryen",
                "lieu_dit": "Dothraki Sea",
                "code_postal": 13888,
                "localite": "Marshhh",
            },
        },
    ],
}
|
||||
|
||||
# Hardcoded fixture files used by the development endpoints.
TEST_FILE_TRAC_ICO = '/vagrant/test_files/trac.ico'
TEST_FILE_CERFA_DIA = '/vagrant/test_files/cerfa_10072-02.pdf'
TEST_FILE_PLAN_CADASTRAL = '/vagrant/test_files/plancadastral.pdf'
|
||||
|
||||
|
||||
def get_file_data(path, b64=True):
    """Return the content of the file at *path*.

    The content is returned base64-encoded unless *b64* is false.
    BUG FIX: the file is opened in binary mode -- these are arbitrary
    binary files (PDF, ICO) and text mode could corrupt them on some
    platforms.
    """
    with open(path, 'rb') as f:
        data = f.read()
    if b64:
        return base64.b64encode(data)
    return data
|
||||
|
||||
|
||||
def get_upload_path(instance, filename):
    """Return the relative directory for a user-uploaded file.

    Django's FileField calls this with the model instance and original
    filename; both are currently ignored and every upload is stored
    under the same directory.
    """
    return 'pass_openADS_up'
|
||||
|
||||
|
||||
class ForwardFile(models.Model):
    """A file uploaded by a user, queued for forwarding to openADS.API."""

    # identifiers linking the file to a request and an openADS dossier
    numero_demande = models.CharField(max_length=20)
    numero_dossier = models.CharField(max_length=20)
    # openADS file type (e.g. 'CERFA', 'plan', 'unknown')
    type_fichier = models.CharField(max_length=10)
    # metadata captured from the original upload
    file_hash = models.CharField(max_length=100, default='', blank=True)
    orig_filename = models.CharField(max_length=100, default='', blank=True)
    content_type = models.CharField(max_length=100, default='', blank=True)
    upload_file = models.FileField(upload_to=get_upload_path, null=True)
    # forwarding state ('pending', 'uploading', 'success', 'failed')
    # plus a human-readable detail message
    upload_status = models.CharField(max_length=10, default='', blank=True)
    upload_msg = models.CharField(max_length=255, default='', blank=True)
    last_update_datetime = models.DateTimeField(auto_now=True)
|
||||
|
||||
|
||||
class AtrealOpenads(BaseResource):
    """API that proxy/relay communications with/to openADS."""

    # Per-dossier job states used by the development endpoints.
    # BUG FIX: use a callable default -- a literal {} is a single shared
    # mutable object.
    myjobs = jsonfield.JSONField(default=dict)
    collectivite = models.CharField(_('Collectivity (identifier)'), max_length=255,
                                    help_text=_('ex: Marseille, ou ex: 3'), default='', blank=True)
    openADS_API_key = models.CharField(_('openADS API key (secret)'), max_length=255,
                                       help_text=_('ex: ah9pGbKKHv5ToF3cPQuV'), default='')
    openADS_API_url = models.URLField(_('openADS API URL'), max_length=255,
                                      help_text=_('ex: https://openads.your_domain.net/api/'), default='')

    category = _('Business Process Connectors')

    api_description = _('''
    This API provides exchanges with openADS.
    ''')

    class Meta:
        verbose_name = _('openADS')
|
||||
|
||||
@endpoint(
    description="[DEV] Return 'Hello <name>'",
    pattern=r'^(?P<name>\w+)/?$',
    example_pattern='{name}/',
    parameters={
        'name': {'description': _('Name'), 'example_value': 'John'}
    })
def hello(self, request, name='world', **kwargs):
    """Development endpoint: echo a greeting payload for *name*."""
    return {'data': {'hello': name}}
|
||||
|
||||
@endpoint(
    description="[DEV] Return what it has received",
    methods=['get', 'post'],
    parameters={
        'body': {'description': _('Dump body'), 'example_value': 'True'},
        'COOKIES': {'description': _('Dump cookies'), 'example_value': 'True'},
        'META': {'description': _('Dump meta'), 'example_value': 'True'},
        'empty': {'description': _('Dump empty values'), 'example_value': 'True'}
    })
def echo(self, request, body=False, cookies=False, meta=False, empty=False, **kwargs):
    """Development endpoint: dump selected parts of the request as JSON.

    Empty values are skipped unless *empty* is true; cookies and meta
    are only dumped when explicitly requested.
    """
    req_infos = {}
    if body:
        v = getattr(request, 'body')
        if (v and len(v)) or empty:
            req_infos['body'] = v
    for k in ['scheme', 'method', 'encoding', 'content_type', 'content_params', 'GET', 'POST', 'COOKIES', 'FILES', 'META']:
        if k not in ['GET', 'POST', 'COOKIES', 'FILES', 'META']:
            v = getattr(request, k)
            if (v and len(v)) or empty:
                req_infos[k] = v
        elif k == 'FILES':
            up_files = getattr(request, k)
            if (up_files and len(up_files)) or empty:
                req_infos[k] = []
                for f in up_files:
                    # in-memory uploads raise when asked for a temp path
                    try:
                        tmp_path = f.temporary_file_path()
                    except Exception:
                        tmp_path = None
                    # BUG FIX: the original used bare names (name, size,
                    # ...) as dict keys, raising NameError at runtime.
                    req_infos[k].append({
                        'name': f.name,
                        'size': f.size,
                        'content_type': f.content_type,
                        'charset': f.charset,
                        'temporary_file_path': tmp_path,
                    })
        elif (k != 'COOKIES' or cookies) and (k != 'META' or meta):
            it = getattr(request, k).items()
            if (it and len(it)) or empty:
                req_infos[k] = {}
                for p, v in it:
                    # only plain string values are dumped
                    if isinstance(v, str) and ((v and len(v)) or empty):
                        req_infos[k][p] = v
    return {
        'data': {
            'received': json.dumps(req_infos)
        }
    }
|
||||
|
||||
@endpoint(
    description="[DEV] Return the file it has received encoded in base64",
    methods=['post'])
def echofile(self, request, *args, **kwargs):
    """Development endpoint: fetch the file at the JSON-provided 'url'
    and return its content base64-encoded.

    Raises APIError on connection/HTTP failures and ValueError when the
    payload or content type is invalid.
    """
    self.logger.info("echofile() request.content_type = '%s'" % request.content_type)
    self.logger.info("echofile() len(request.body) = '%d'" % len(request.body))
    if request.content_type != 'application/json' or not len(request.body):
        raise ValueError("invalid content type of request '%s' (but must be '%s')" % (request.content_type, 'application/json'))

    json_data = json.loads(request.body)
    self.logger.info("echofile() 'url' in json = '%s'" % str('url' in json_data))
    if 'url' not in json_data:
        raise ValueError("invalid request payload (no 'url' key found)")

    url = json_data['url']
    self.logger.info("echofile() url = '%s'" % url)

    try:
        response = self.requests.get(url)
    except Exception as e:
        # BUG FIX: the original referenced the unbound 'response' and the
        # unimported 'requests'/'six' names inside this handler.
        raise APIError(
            'API-WCS connection error: %s' % e,
            data={
                'url': url,
                'error': str(e)
            })

    self.logger.info("echofile() response is '%s' (%s)" % (response, response.__class__))
    self.logger.info("echofile() response.status_code = '%s'" % response.status_code)
    if response.status_code != 200:
        # BUG FIX: the original format string had two placeholders for a
        # single argument, raising TypeError instead of APIError.
        raise APIError(
            'API-WCS returned a non 200 status: %s' % response.status_code,
            data={
                'status_code': response.status_code,
                'url': url
            })

    if 'content-type' in response.headers:
        self.logger.info("echofile() response['content-type'] = '%s'" % response.headers['content-type'])
    if 'content-disposition' in response.headers:
        self.logger.info("echofile() response['content-disposition'] = '%s'" % response.headers['content-disposition'])
    self.logger.info("echofile() response.content[:50] = '%s'" % response.content[:50])
    return {
        'content_type': response.content_type if hasattr(response, 'content_type') else 'application/octet-stream',
        'content': base64.b64encode(response.content)
    }
|
||||
|
||||
|
||||
@endpoint(
    description="[DEV] Return a file structure with its content in base64 from an hardcoded file",
    pattern=r'^(?P<format>\w+)/?$',
    example_pattern='{format}/',
    parameters={
        'format': {'description': _('Format'), 'example_value': 'jsondata'}
    })
def afile(self, request, format='json', **kwargs):
    """Development endpoint: return a hardcoded icon in several formats.

    NOTE(review): TRAC_ICO_B64_CONTENT is not defined anywhere in this
    module, so this endpoint raises NameError as shipped -- confirm
    where that constant is supposed to come from.
    """
    rand_id = base64.urlsafe_b64encode(os.urandom(6))
    self.add_job('ajob', natural_id=rand_id, dossier_id=rand_id)
    if format == 'base64':
        return TRAC_ICO_B64_CONTENT
    elif format[:4] == 'json':
        # BUG FIX: the original bound this dict to the name 'json',
        # shadowing the imported json module within this function.
        payload = {
            'afile': {
                'filename': 'trac.ico',
                'content_type': 'image/x-icon',
                'b64_content': TRAC_ICO_B64_CONTENT
            },
            'extra_info': 'blabla'
        }
        if format == 'jsondata':
            return {'data': payload}
        return payload
    else:
        raw_content = base64.b64decode(TRAC_ICO_B64_CONTENT)
        return HttpResponse(raw_content, content_type='image/x-icon')
|
||||
|
||||
def ajob(self, dossier_id, *args, **kwargs):
    """A test job: walk *dossier_id* through started/running/ended states."""
    self.myjobs[dossier_id] = 'started'
    self.save()
    self.logger.info("Started ajob() %s" % dossier_id)
    for _step in range(10):
        self.logger.info("Updated ajob() %s" % dossier_id)
        self.myjobs[dossier_id] = 'running'
        self.save()
        # deliberately slow, to observe the job from the outside
        time.sleep(10)
    self.logger.info("Ended ajob() %s" % dossier_id)
    self.myjobs[dossier_id] = 'ended'
    self.save()
|
||||
|
||||
def check_status(self):
    """Check availability of the openADS.API service."""
    api_url = urlparse.urljoin(self.openADS_API_url, '__api__')
    response = self.requests.get(api_url)
    # let any non-2xx status surface as an exception
    response.raise_for_status()
    return {'response': response.status_code}
|
||||
|
||||
@endpoint(
    description="Create an openADS 'dossier' (harcoded for now)",
    pattern=r'^(?P<type_dossier>\w+)/?$',
    example_pattern='{type_dossier}/',
    parameters={
        'type_dossier': {'description': _('Type de dossier'), 'example_value': 'DIA'}
    })
def create_dossier(self, request, type_dossier, *args, **kwargs):
    """Create a (currently hardcoded) 'dossier' in openADS, then queue a
    job forwarding test files to it.

    Raises APIError on HTTP errors, missing JSON or an invalid receipt.
    """
    payload = {
        "collectivite": self.collectivite,
        "terrain": {
            "numero_voie": 15,
            "nom_voie": "boulevard Layglize",
            "lieu_dit": "Li Corne",
            "code_postal": 13014,
            "localite": "Marseille",
            "references_cadastrales": [
                {
                    "prefixe": "999",
                    "section": "Z",
                    "numero": "0010"
                },
                {
                    "prefixe": "696",
                    "section": "M",
                    "numero": "0012"
                }
            ]
        },
        "demandeurs": [
            {
                "type_personne": "particulier",
                "typologie": "petitionnaire",
                "nom": "Neige",
                "prenom": "Jean",
                "adresse": {
                    "numero_voie": 8,
                    "nom_voie": "boulevard Stark",
                    "lieu_dit": "Castleblack",
                    "code_postal": 13666,
                    "localite": "Marsnuit"
                }
            },
            {
                "type_personne": "particulier",
                "typologie": "petitionnaire",
                "nom": "Khaleesi",
                "prenom": "Daenerys",
                "adresse": {
                    "numero_voie": 1,
                    "nom_voie": "boulevard Targaryen",
                    "lieu_dit": "Dothraki Sea",
                    "code_postal": 13888,
                    "localite": "Marshhh"
                }
            }
        ]
    }
    url = urlparse.urljoin(self.openADS_API_url, '/dossiers/%s' % type_dossier)
    response = self.requests.post(url, json=payload)
    if response.status_code // 100 != 2:
        raise APIError(self.get_response_error(response))
    try:
        result = response.json()
    except ValueError:
        raise APIError('No JSON content returned: %r' % response.content[:1000])
    numero_dossier = result.get('numero_dossier')
    recepisse = result.get('files')[0]
    try:
        # decode only to validate the receipt content; the decoded
        # bytes themselves are not used here
        base64.b64decode(recepisse['b64_content'])
    except TypeError:
        raise APIError('Invalid content for recepisse')

    files = [TEST_FILE_CERFA_DIA, TEST_FILE_PLAN_CADASTRAL]
    if files:
        file_ids = []
        for f in files:
            fwd_file = self.upload2ForwardFile(f, numero_dossier)
            fwd_file.save()
            file_ids.append(fwd_file.id)

        # BUG FIX: pass type_dossier -- upload_user_files() requires it
        # and the original call omitted it, making the job crash.
        self.add_job('upload_user_files',
                     natural_id=numero_dossier,
                     type_dossier=type_dossier,
                     numero_dossier=numero_dossier,
                     file_ids=file_ids)

    return {'data': response.json()}
|
||||
|
||||
|
||||
def upload2ForwardFile(self, path, numero_dossier):
    """Build (without saving) a ForwardFile for the file at *path*,
    attached to *numero_dossier*.

    Returns None when *path* is empty.
    """
    if not path:
        return None
    rand_id = base64.urlsafe_b64encode(os.urandom(6))
    fwd_file = ForwardFile()
    fwd_file.numero_demande = rand_id
    fwd_file.numero_dossier = numero_dossier
    if path == TEST_FILE_CERFA_DIA:
        fwd_file.type_fichier = 'CERFA'
    elif path == TEST_FILE_PLAN_CADASTRAL:
        fwd_file.type_fichier = 'plan'
    else:
        fwd_file.type_fichier = 'unknown'
    fwd_file.orig_filename = os.path.basename(path)
    fwd_file.content_type = magic.from_file(path, mime=True)
    # BUG FIX: open in binary mode -- these are binary files (PDFs) and
    # text mode may corrupt the hash and the stored upload on some
    # platforms.
    with open(path, 'rb') as fp:
        fwd_file.file_hash = self.file_digest(fp)
    fwd_file.upload_file = File(open(path, 'rb'))
    fwd_file.upload_status = 'pending'
    return fwd_file
|
||||
|
||||
|
||||
@endpoint(
    description="Get informations about an openADS 'dossier'",
    pattern=r'^(?P<type_dossier>\w+)/(?P<numero_dossier>\w+)/?$',
    example_pattern='{type_dossier}/{numero_dossier}/',
    parameters={
        'type_dossier': {'description': _('Type de dossier'), 'example_value': 'DIA'},
        'numero_dossier': {'description': _('Numero de dossier'), 'example_value': 'DIA0130551900001'}
    })
def get_dossier(self, request, type_dossier, numero_dossier, *args, **kwargs):
    """Return the openADS data of the given dossier as-is.

    Raises APIError on HTTP errors or a non-JSON response.
    """
    url = urlparse.urljoin(self.openADS_API_url, '/dossier/%s/%s' % (type_dossier, numero_dossier))
    response = self.requests.get(url)
    if response.status_code // 100 != 2:
        raise APIError(self.get_response_error(response))
    try:
        # parse only to validate that the response is JSON; the raw
        # payload is returned untouched below
        response.json()
    except ValueError:
        raise APIError('No JSON content returned: %r' % response.content[:1000])
    # the original extracted etat/date_depot/date_limite_instruction/
    # date_decision/decision here but never used them -- removed
    return {'data': response.json()}
|
||||
|
||||
|
||||
@endpoint(
    description="[DEV] Add an hardcoded file to an openADS 'dossier' synchronously",
    pattern=r'^(?P<type_dossier>\w+)/(?P<numero_dossier>\w+)/?$',
    example_pattern='{type_dossier}/{numero_dossier}/',
    parameters={
        'type_dossier': {'description': _('Type de dossier'), 'example_value': 'DIA'},
        'numero_dossier': {'description': _('Numero de dossier'), 'example_value': 'DIA0130551900001'}
    })
def add_file(self, request, type_dossier, numero_dossier, *args, **kwargs):
    """Development endpoint: synchronously push a hardcoded CERFA file
    to the given openADS dossier."""
    dia_b64 = get_file_data(TEST_FILE_CERFA_DIA)
    payload = [
        {
            "filename": "DIA_cerfa_10072-02.pdf",
            "content_type": "text/plain",
            "b64_content": dia_b64,
            "file_type": "CERFA"
        }
    ]
    url = urlparse.urljoin(self.openADS_API_url, '/dossier/%s/%s/files' % (type_dossier, numero_dossier))
    response = self.requests.post(url, json=payload)
    if response.status_code // 100 != 2:
        raise APIError(self.get_response_error(response))
    try:
        response.json()
    except ValueError:
        raise APIError('No JSON content returned: %r' % response.content[:1000])
    # TODO handle response (now its just an informational sentence in key 'data')
    return {'data': response.json()}
|
||||
|
||||
|
||||
@endpoint(
    description="[DEV] Add an hardcoded file to an openADS 'dossier' asynchronously",
    pattern=r'^(?P<type_dossier>\w+)/(?P<numero_dossier>\w+)/?$',
    example_pattern='{type_dossier}/{numero_dossier}/',
    parameters={
        'type_dossier': {'description': _('Type de dossier'), 'example_value': 'DIA'},
        'numero_dossier': {'description': _('Numero de dossier'), 'example_value': 'DIA0130551900001'}
    })
def add_file_async(self, request, type_dossier, numero_dossier, *args, **kwargs):
    """Development endpoint: queue a job forwarding a hardcoded CERFA
    file to the given openADS dossier."""
    fwd_file = self.upload2ForwardFile(TEST_FILE_CERFA_DIA, numero_dossier)
    fwd_file.save()
    self.add_job('upload_user_files',
                 natural_id=numero_dossier,
                 type_dossier=type_dossier,
                 numero_dossier=numero_dossier,
                 file_ids=[fwd_file.id])
    return {'data': 'upload is pending (async)'}
|
||||
|
||||
|
||||
@endpoint(
    description="Get informations about the forwarding of a user file to openADS",
    methods=['get'],
    pattern=r'^(?P<numero_dossier>\w+)(/(?P<fichier_id>\w+))?/?$',
    example_pattern='{numero_dossier}/{fichier_id}/',
    parameters={
        'numero_dossier': {'description': _('Numero de dossier') , 'example_value': 'DIA0130551900001'},
        'fichier_id' : {'description': _('Identifiant de fichier'), 'example_value': '78'}
    })
def get_fwd_files_status(self, request, numero_dossier, fichier_id=None, *args, **kwargs):
    """Return status info (and base64 content) of the forwarded files of
    a dossier, optionally restricted to one file id.

    Raises Http404 when no matching file exists.
    """
    if fichier_id:
        try:
            fwd_files = [ForwardFile.objects.get(id=fichier_id)]
        except ForwardFile.DoesNotExist:
            raise Http404("No file matches 'numero_dossier=%s' and 'id=%s'." % (numero_dossier, fichier_id))
    else:
        # BUG FIX: QuerySet.filter() never raises DoesNotExist, so the
        # original 404 branch was dead code -- detect an empty result
        # explicitly instead.
        fwd_files = list(ForwardFile.objects.filter(numero_dossier=numero_dossier))
        if not fwd_files:
            raise Http404("No file matches 'numero_dossier=%s'." % numero_dossier)
    payload = []
    for fwd_file in fwd_files:
        payload.append({
            'id' : fwd_file.id,
            'numero_demande': fwd_file.numero_demande,
            'numero_dossier': fwd_file.numero_dossier,
            'type_fichier' : fwd_file.type_fichier,
            'file_hash' : fwd_file.file_hash,
            'orig_filename' : fwd_file.orig_filename,
            'content_type' : fwd_file.content_type,
            'upload_status' : fwd_file.upload_status,
            'upload_msg' : fwd_file.upload_msg,
            'b64_content' : get_file_data(fwd_file.upload_file.path),
            'last_update_datetime' : fwd_file.last_update_datetime
        })
    return {'data': payload}
|
||||
|
||||
|
||||
@endpoint(
    description="[DEV] Create a ForwardFile from an hardcoded file",
    methods=['get'],
    pattern=r'^(?P<numero_dossier>\w+)/?$',
    example_pattern='{numero_dossier}/',
    parameters={
        'numero_dossier': {'description': _('Numero de dossier'), 'example_value': 'DIA0130551900001'}
    })
def createForwardFile(self, request, numero_dossier, *args, **kwargs):
    """Development endpoint: create and save a ForwardFile built from a
    hardcoded test file."""
    fwd_file = self.upload2ForwardFile(TEST_FILE_CERFA_DIA, numero_dossier)
    fwd_file.save()
    return {'data': "ForwardFile '%s' created" % fwd_file.id}
|
||||
|
||||
|
||||
@endpoint(
    description="Get a 'courrier' from an openADS 'dossier'",
    pattern=r'^(?P<type_dossier>\w+)/(?P<numero_dossier>\w+)/?$',
    example_pattern='{type_dossier}/{numero_dossier}/',
    parameters={
        'type_dossier': {'description': _('Type de dossier'), 'example_value': 'DIA'},
        'numero_dossier': {'description': _('Numero de dossier'), 'example_value': 'DIA0130551900001'}
    })
def get_courrier(self, request, type_dossier, numero_dossier, *args, **kwargs):
    """Fetch the 'dia_renonciation_preempter' courrier of a dossier and
    return the raw openADS response.

    Raises APIError on HTTP errors, a non-JSON response or invalid
    base64 content.
    """
    url = urlparse.urljoin(
        self.openADS_API_url,
        '/dossier/%s/%s/courrier/%s' % (type_dossier, numero_dossier, 'dia_renonciation_preempter'))
    response = self.requests.get(url)
    if response.status_code // 100 != 2:
        raise APIError(self.get_response_error(response))
    try:
        result = response.json()
    except ValueError:
        raise APIError('No JSON content returned: %r' % response.content[:1000])
    courrier = result.get('files')[0]
    try:
        # decode only to validate the returned content; the decoded
        # bytes are not used (the raw JSON is returned below)
        base64.b64decode(courrier['b64_content'])
    except TypeError:
        raise APIError('Invalid content for courrier')
    return {'data': response.json()}
|
||||
|
||||
|
||||
def get_response_error(self, response):
    """Return a readable error string built from an HTTP response.

    Prefers the structured 'errors' list of a JSON body; falls back to
    the (tag-stripped) raw content.
    """
    try:
        result = response.json()
        errors = result.get('errors')
        msg = []
        if errors:
            for error in errors:
                location = error.get('location')
                desc = error.get('description')
                # the original also extracted error['name'] but never
                # used it -- removed
                msg.append('[%s] %s' % (location, clean_spaces(desc)))
        if msg:
            return "HTTP error: %s, %s" % (response.status_code, ','.join(msg))
    except ValueError:
        # body was not JSON; fall through to the raw-content message
        pass
    return "HTTP error: %s, %s" % \
        (response.status_code,
         clean_spaces(strip_tags(response.content[:1000])) if response.content else '')
|
||||
|
||||
|
||||
def upload_user_files(self, type_dossier, numero_dossier, file_ids):
    """A Job to forward user uploaded files to openADS.

    Loads the ForwardFile rows in *file_ids*, posts them to the dossier
    and updates each row's upload_status/upload_msg with the outcome.
    """
    payload = []
    fwd_files = []
    for fid in file_ids:
        self.logger.debug("upload_user_files() ForwardFile file_id: %s" % fid)
        fwd_file = ForwardFile.objects.get(id=fid)
        if fwd_file:
            self.logger.debug("upload_user_files() got ForwardFile")
            payload.append({
                'filename' : os.path.basename(fwd_file.upload_file.path) + '.pdf',
                'content_type' : fwd_file.content_type,
                'b64_content' : get_file_data(fwd_file.upload_file.path),
                'file_type' : fwd_file.type_fichier
            })
            self.logger.debug("upload_user_files() payload added")
            fwd_file.upload_status = 'uploading'
            if fwd_file.upload_msg and re.search(r'^attempt \d+$', fwd_file.upload_msg):
                self.logger.debug("upload_user_files() upload_msg: '%s'" % fwd_file.upload_msg)
                attempt_num = int(fwd_file.upload_msg.replace('attempt ', '').strip())
                self.logger.debug("upload_user_files() attempt_num: '%s'" % attempt_num)
                # BUG FIX: increment the attempt counter -- the original
                # wrote the parsed number back unchanged, so the count
                # never moved past its first value.
                fwd_file.upload_msg = 'attempt %s' % (attempt_num + 1)
            else:
                fwd_file.upload_msg = 'attempt 1'
            self.logger.debug("upload_user_files() ForwardFile ready to be saved")
            fwd_file.save()
            fwd_files.append(fwd_file)
    if payload:
        self.logger.debug("upload_user_files() payload is not empty")
        # log a copy of the payload with the file contents elided
        debug_payload = copy.deepcopy(payload)
        for p in debug_payload:
            if 'b64_content' in p:
                p['b64_content'] = '<b64 content>'
        self.logger.debug("upload_user_files() payload is: %s" % str(debug_payload))
        url = urlparse.urljoin(self.openADS_API_url, '/dossier/%s/%s/files' % (type_dossier, numero_dossier))
        response = self.requests.post(url, json=payload)
        if response.status_code // 100 != 2:
            for fwd_file in fwd_files:
                fwd_file.upload_status = 'failed'
                fwd_file.upload_msg = self.get_response_error(response)
                fwd_file.save()
        else:
            try:
                response.json()
            except ValueError:
                for fwd_file in fwd_files:
                    fwd_file.upload_status = 'failed'
                    fwd_file.upload_msg = 'No JSON content returned: %r' % response.content[:1000]
                    fwd_file.save()
            else:
                # TODO handle response (now its just an informational sentence in key 'data')
                for fwd_file in fwd_files:
                    fwd_file.upload_status = 'success'
                    fwd_file.upload_msg = 'uploaded successfuly'
                    fwd_file.save()
|
||||
|
||||
|
||||
# copy-pasted from 'wcs/qommon/misc.py'
def file_digest(self, content, chunk_size=100000):
    """Return the SHA-256 hex digest of the file-like *content*.

    The stream is rewound first and read in *chunk_size* pieces.
    BUG FIX/generalization: the original used iter(read_chunk, '')
    whose str sentinel only matches text-mode reads; the explicit EOF
    test below terminates for both str and bytes streams.
    """
    digest = hashlib.sha256()
    content.seek(0)
    while True:
        chunk = content.read(chunk_size)
        if not chunk:
            break
        digest.update(chunk)
    return digest.hexdigest()
|
||||
|
Reference in New Issue