start passerelle-reunion-fsn
This commit is contained in:
commit
d709c5bbf5
|
@ -0,0 +1,3 @@
|
|||
*.pyc
|
||||
*.egg-info
|
||||
*.xml
|
|
@ -0,0 +1,42 @@
|
|||
@Library('eo-jenkins-lib@master') import eo.Utils
|
||||
|
||||
pipeline {
    agent any
    options {
        // only one build of this job at a time
        disableConcurrentBuilds()
    }
    stages {
        stage('Unit Tests') {
            steps {
                // run the tox envlist (see tox.ini); -r recreates envs, -v is verbose
                sh 'tox -rv'
            }
            post {
                always {
                    // collect the junit XML produced by pytest (--junitxml in tox.ini)
                    mergeJunitResults()
                }
            }
        }
        stage('Packaging') {
            steps {
                script {
                    // build Debian packages: from master on the canonical job,
                    // or from any hotfix/* branch with the --hotfix flag
                    if (env.JOB_NAME == 'passerelle-reunion-fsn' && env.GIT_BRANCH == 'origin/master') {
                        sh 'sudo -H -u eobuilder /usr/local/bin/eobuilder -d stretch passerelle-reunion-fsn'
                    } else if (env.GIT_BRANCH.startsWith('hotfix/')) {
                        sh "sudo -H -u eobuilder /usr/local/bin/eobuilder -d stretch --branch ${env.GIT_BRANCH} --hotfix passerelle-reunion-fsn"
                    }
                }
            }
        }
    }
    post {
        always {
            script {
                // mail the build result to the CI mailing list
                utils = new Utils()
                utils.mail_notify(currentBuild, env, 'ci+jenkins-passerelle-reunion-fsn@entrouvert.org')
            }
        }
        success {
            // clean the workspace only on success, keeping artifacts for debugging failures
            cleanWs()
        }
    }
}
|
|
@ -0,0 +1,5 @@
|
|||
passerelle-reunion-fsn (0-0) unstable; urgency=low
|
||||
|
||||
* initial packaging.
|
||||
|
||||
-- Emmanuel Cazenave <ecazenave@entrouvert.com> Thu, 16 Apr 2020 07:44:31 +0200
|
|
@ -0,0 +1 @@
|
|||
9
|
|
@ -0,0 +1,12 @@
|
|||
Source: passerelle-reunion-fsn
|
||||
Maintainer: Emmanuel Cazenave <ecazenave@entrouvert.com>
|
||||
Section: python
|
||||
Priority: optional
|
||||
Build-Depends: python-setuptools (>= 0.6b3), python-all (>= 2.7), debhelper (>= 9), python-django
|
||||
Standards-Version: 3.9.1
|
||||
|
||||
Package: python-passerelle-reunion-fsn
|
||||
Architecture: all
|
||||
Depends: ${misc:Depends}, ${python:Depends}
|
||||
Description: Passerelle CR Reunion FSN
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/make -f
|
||||
|
||||
%:
|
||||
dh $@ --with python2
|
|
@ -0,0 +1 @@
|
|||
3.0 (quilt)
|
|
@ -0,0 +1,93 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.29 on 2020-04-16 12:12
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import passerelle_reunion_fsn.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Initial schema of the passerelle_reunion_fsn app: the connector
    # (FSNReunionConnector) plus its DSDossier and Entreprise tables.
    # NOTE(review): auto-generated by Django (see file header) — keep in
    # sync with passerelle_reunion_fsn/models.py rather than hand-editing.

    initial = True

    dependencies = [
        # depends on passerelle's 'base' app (ApiUser referenced below)
        ('base', '0018_smslog'),
    ]

    operations = [
        migrations.CreateModel(
            name='DSDossier',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ds_id', models.CharField(max_length=256)),
                ('ds_state', models.CharField(max_length=256)),
                ('csv_file', models.FileField(upload_to=passerelle_reunion_fsn.models.csv_file_location)),
                ('csv_filename', models.CharField(max_length=256)),
                ('last_update_datetime', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='Entreprise',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code_application', models.CharField(max_length=20)),
                ('sequence', models.CharField(max_length=16)),
                ('periode', models.CharField(max_length=35)),
                ('siren', models.CharField(max_length=11)),
                ('nom1', models.CharField(blank=True, max_length=35)),
                ('nom2', models.CharField(blank=True, max_length=35)),
                ('nb_salarie', models.CharField(max_length=2)),
                ('rue', models.CharField(blank=True, max_length=35)),
                ('boite_postale', models.CharField(blank=True, max_length=10)),
                ('code_postale', models.CharField(blank=True, max_length=10)),
                ('ville', models.CharField(blank=True, max_length=35)),
                ('code_pays', models.CharField(max_length=2)),
                ('region', models.CharField(max_length=3)),
                ('iban', models.CharField(max_length=34)),
                ('montant', models.FloatField()),
                ('devise', models.CharField(max_length=3)),
                ('nom_demandeur', models.CharField(blank=True, max_length=35)),
                ('prenom_demandeur', models.CharField(blank=True, max_length=35)),
                ('qualite', models.CharField(blank=True, max_length=35)),
                ('tel', models.CharField(blank=True, max_length=30)),
                ('courriel', models.CharField(blank=True, max_length=241)),
                ('last_update_datetime', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='FSNReunionConnector',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50, verbose_name='Title')),
                ('slug', models.SlugField(unique=True, verbose_name='Identifier')),
                ('description', models.TextField(verbose_name='Description')),
                ('api_url', models.URLField(max_length=400, verbose_name='DS API URL')),
                ('token', models.CharField(max_length=256, verbose_name='DS token')),
                ('demarche_number', models.IntegerField(verbose_name='Demarche number')),
                ('instructeur_id', models.CharField(blank=True, help_text='Region identifier for this case', max_length=256, verbose_name='Instructeur identifier')),
                ('users', models.ManyToManyField(blank=True, related_name='_fsnreunionconnector_users_+', related_query_name='+', to='base.ApiUser')),
            ],
            options={
                'verbose_name': 'FSN Reunion',
            },
        ),
        migrations.AddField(
            model_name='entreprise',
            name='resource',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='passerelle_reunion_fsn.FSNReunionConnector'),
        ),
        migrations.AddField(
            model_name='dsdossier',
            name='resource',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dossiers', to='passerelle_reunion_fsn.FSNReunionConnector'),
        ),
        migrations.AlterUniqueTogether(
            name='entreprise',
            unique_together=set([('resource', 'siren'), ('resource', 'sequence')]),
        ),
        migrations.AlterUniqueTogether(
            name='dsdossier',
            unique_together=set([('resource', 'ds_id')]),
        ),
    ]
|
|
@ -0,0 +1,460 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# passerelle-reunion-fsn
|
||||
# Copyright (C) 2020 Entr'ouvert
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Affero General Public License as published
|
||||
# by the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import csv
|
||||
from io import BytesIO
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.db import models, transaction
|
||||
from django.http import FileResponse
|
||||
from django.utils.encoding import force_str, smart_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from passerelle.base.models import BaseResource
|
||||
from passerelle.utils.api import endpoint
|
||||
from passerelle.utils.jsonresponse import APIError
|
||||
|
||||
|
||||
# Ordered column names of the CSV file attached to DS dossiers; these are
# also the Entreprise model field names fed by DSDossier.get_rows() and
# the column order of the consolidated export in get_csv().
COLUMNS_KEYNAMES = [
    'code_application',
    'sequence',
    'periode',
    'siren',
    'nom1',
    'nom2',
    'nb_salarie',
    'rue',
    'boite_postale',
    'code_postale',
    'ville',
    'code_pays',
    'region',
    'iban',
    'montant',
    'devise',
    'nom_demandeur',
    'prenom_demandeur',
    'qualite',
    'tel',
    'courriel',
]
|
||||
|
||||
|
||||
def csv_file_location(instance, filename):
    """Upload-path builder for DSDossier.csv_file: files are stored under
    fsn_reunion/<ds_id>/<original filename>."""
    return 'fsn_reunion/{0}/{1}'.format(instance.ds_id, filename)
||||
|
||||
|
||||
class FSNReunionConnector(BaseResource):
    """Connector to the demarches-simplifiees (DS) GraphQL API.

    Proxies a few DS queries (instructeurs, field descriptors, dossiers),
    fetches the CSV file attached to each dossier into a local DSDossier
    record, and consolidates the CSV rows into Entreprise records.
    """

    category = _('Business Process Connectors')

    class Meta:
        verbose_name = _('FSN Reunion')

    api_url = models.URLField(max_length=400, verbose_name=_('DS API URL'))
    token = models.CharField(max_length=256, verbose_name=_('DS token'))
    demarche_number = models.IntegerField(verbose_name=_('Demarche number'))
    # groupeInstructeur identifier used to filter fetched dossiers
    instructeur_id = models.CharField(
        max_length=256, blank=True, verbose_name=_('Instructeur identifier'),
        help_text=_('Region identifier for this case')
    )

    def _ds_call(self, query, variables):
        """POST a GraphQL query to the DS API and return its 'data' part.

        Raises APIError on a non-200 HTTP status, or when the GraphQL
        response carries an 'errors' list (first explicit message wins).
        """
        headers = {
            'Authorization': 'Bearer token=' + self.token
        }
        data = {
            'query': query,
            'variables': variables
        }
        response = self.requests.post(self.api_url, headers=headers, json=data)
        if response.status_code != 200:
            # fixed typo in message: 'occured' -> 'occurred'
            raise APIError('An error occurred, status code : %s' % response.status_code)
        json_data = response.json()  # renamed from 'json' to avoid shadowing

        if 'errors' in json_data:
            msg = 'Unknown error'  # fixed typo: was 'Unkown error'
            for error in json_data['errors']:
                if 'message' in error:
                    msg = error['message']
                    break
            raise APIError('An error occurred : %s' % msg)

        return json_data['data']

    @endpoint(
        methods=['get'], perm='can_access', name='dsproxy-get-instructeurs',
        description=_('DS Proxy: get instructeurs')
    )
    def dsproxy_get_instructeurs(self, request):
        """Return the demarche's groupeInstructeurs (id and label)."""
        query = '''
            query getInstructeurs($demarcheNumber: Int!){
                demarche(number: $demarcheNumber) {
                    groupeInstructeurs {
                        id
                        label
                    }
                }
            }
        '''
        variables = {
            'demarcheNumber': self.demarche_number
        }
        return {
            'data': self._ds_call(query, variables)
        }

    @endpoint(
        methods=['get'], perm='can_access', name='dsproxy-get-liste-champs',
        description=_('DS Proxy: get fields identifiers'),
    )
    def dsproxy_get_liste_champs(self, request):
        """Return the demarche's field descriptors (id, label, type, required)."""
        query = '''
            query getChampDescriptors($demarcheNumber: Int!) {
                demarche(number: $demarcheNumber) {
                    champDescriptors {
                        id
                        label
                        type
                        required
                    }
                }
            }
        '''
        variables = {
            'demarcheNumber': self.demarche_number
        }
        return {
            'data': self._ds_call(query, variables)
        }

    def _ds_get_dossiers(self):
        """Fetch all dossiers of the demarche, following GraphQL cursor
        pagination, keeping only nodes whose groupeInstructeur matches
        self.instructeur_id.

        Raises APIError when no instructeur identifier is configured.
        Returns {'dossiers': [...], 'num_dossiers': N}.
        """
        if not self.instructeur_id:
            # fixed typo in message: 'identifer' -> 'identifier'
            raise APIError('Need an instructeur identifier')

        query = '''
            query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: Int, $after: String) {
                demarche(number: $demarcheNumber) {
                    number
                    dossiers(first: $first, after: $after, createdSince: $createdSince) {
                        pageInfo {
                            hasNextPage
                            endCursor
                        }
                        nodes {
                            id
                            number
                            state
                            datePassageEnConstruction
                            datePassageEnInstruction
                            dateTraitement
                            dateDerniereModification
                            usager {
                                email
                            }
                            groupeInstructeur {
                                id
                            }
                            demandeur {
                                ... on PersonneMorale {
                                    siret
                                    entreprise {
                                        siren
                                        raisonSociale
                                    }
                                }
                            }
                            messages {
                                email
                                body
                                createdAt
                                attachment {
                                    url
                                    filename
                                    contentType
                                    byteSize
                                    checksum
                                }
                            }
                            champs {
                                id
                                label
                                ... on TextChamp {
                                    value
                                }
                                ... on DateChamp {
                                    value
                                }
                                ... on PieceJustificativeChamp {
                                    file {
                                        url
                                        filename
                                        contentType
                                        byteSize
                                        checksum
                                    }
                                }
                            }
                        }
                    }
                }
            }
        '''
        variables = {
            'demarcheNumber': self.demarche_number,
            'after': None
        }
        dossiers = []
        has_next_page = True
        while has_next_page:
            raw_data = self._ds_call(query, variables)
            data = raw_data['demarche']['dossiers']
            for node in data['nodes']:
                # NOTE: the original also accepted every node when
                # instructeur_id was empty, but that branch was dead code
                # (an empty instructeur_id raises above), so it is dropped.
                if node['groupeInstructeur']['id'] == self.instructeur_id:
                    dossiers.append(node)
            has_next_page = data['pageInfo']['hasNextPage']
            variables['after'] = data['pageInfo']['endCursor']

        return {
            'dossiers': dossiers,
            'num_dossiers': len(dossiers)
        }

    @endpoint(
        methods=['get'], perm='can_access', name='dsproxy-get-dossiers',
        description=_('DS Proxy: get dossiers')
    )
    def dsproxy_get_dossiers(self, request):
        """Raw proxy over _ds_get_dossiers (no local persistence)."""
        return {
            'data': self._ds_get_dossiers()
        }

    @endpoint(
        methods=['get'], perm='can_access', name='fetch-dossiers',
        description=_('Fetch dossiers from DS and consolidate into local data')
    )
    def fetch_dossiers(self, request):
        """Fetch new dossiers from DS, store their CSV file locally, and
        upsert the rows into Entreprise; returns the new dossiers."""
        dossiers = self._fetch_dossiers(request)
        return {
            'data': {
                'dossiers': dossiers,
                'num_dossiers': len(dossiers)
            }
        }

    def _fetch_dossiers(self, request):
        """Synchronize DS dossiers into local DSDossier/Entreprise records.

        Already-known dossiers only get their ds_state refreshed; new ones
        have their first attached file downloaded and their CSV rows
        upserted into Entreprise, atomically per dossier.
        Returns the JSON representation of newly created dossiers.
        """
        res = []
        for dossier in self._ds_get_dossiers()['dossiers']:
            id_dossier = dossier['id']

            try:
                local_dossier = self.dossiers.get(ds_id=id_dossier)
                # already fetched: just refresh the DS state metadata
                local_dossier.ds_state = dossier['state']
                local_dossier.save()
                continue
            except DSDossier.DoesNotExist:
                pass

            with transaction.atomic():

                # grab the first file-bearing champ
                ds_dossier = None
                for champ in dossier['champs']:
                    if 'file' in champ:
                        file_url = champ['file']['url']
                        # TODO : check file integrity
                        # file_checksum = champ['file']['checksum']
                        filename = champ['file']['filename']
                        response = self.requests.get(file_url)
                        if response.status_code != 200:
                            # was a bare assert: stripped under 'python -O'
                            # and raised an opaque AssertionError otherwise
                            raise APIError(
                                'Could not download file, status code : %s'
                                % response.status_code)
                        ds_dossier = DSDossier.objects.create(
                            resource=self, ds_id=id_dossier, csv_filename=filename,
                            ds_state=dossier['state']
                        )
                        ds_dossier.csv_file.save(filename, BytesIO(response.content))
                        res.append(ds_dossier.to_json(request))
                        break

                # upsert the CSV rows into Entreprise, keyed on 'sequence'
                if ds_dossier:
                    for row in ds_dossier.get_rows():
                        sequence = row['sequence']
                        try:
                            entreprise = Entreprise.objects.filter(resource=self, sequence=sequence).get()
                            for attr, value in row.items():
                                setattr(entreprise, attr, value)
                        except Entreprise.DoesNotExist:
                            entreprise = Entreprise(resource=self, **row)
                        entreprise.save()

        return res

    @endpoint(
        methods=['get'], perm='can_access', name='get-dossiers',
        description=_('Get dossiers')
    )
    def get_dossiers(self, request):
        """Return the JSON representation of all locally stored dossiers."""
        res = [dossier.to_json(request) for dossier in self.dossiers.all()]

        return {
            'data': {
                'dossiers': res
            }
        }

    @endpoint(
        methods=['get'], perm='can_access', example_pattern='{dossier_pk}/',
        pattern=r'^(?P<dossier_pk>\w+)/$', name='get-dossier-file',
        parameters={
            'dossier_pk': {
                'description': _('Local dossier identifier'),
                'example_value': '2'
            }
        }, description=_('Get csv file from dossier')
    )
    def get_dossier_file(self, request, dossier_pk):
        """Serve the stored CSV file of one dossier as an attachment."""
        try:
            dossier = self.dossiers.get(pk=dossier_pk)
        except DSDossier.DoesNotExist:
            # fixed typo in message: was 'Dossier deos not exist'
            raise APIError('Dossier does not exist')

        response = FileResponse(dossier.csv_file, content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="%s"' % dossier.csv_filename
        return response

    @endpoint(
        methods=['get'], perm='can_access', name='get-csv',
        description=_('Get consolidated data (csv file)')
    )
    def get_csv(self, request):
        """Stream every Entreprise row of this connector as a CSV file."""

        class Echo(object):
            """An object that implements just the write method of the file-like
            interface.
            """
            def write(self, value):
                """Write the value by returning it, instead of storing in a buffer."""
                return value

        def get_rows():
            # lazily yield one CSV row per Entreprise, in COLUMNS_KEYNAMES order
            for entreprise in Entreprise.objects.filter(resource=self):
                yield [getattr(entreprise, attr) for attr in COLUMNS_KEYNAMES]

        pseudo_buffer = Echo()
        writer = csv.writer(pseudo_buffer)
        # Echo.write returns each formatted line, so the generator streams
        # the file without buffering it in memory.
        response = FileResponse(
            (writer.writerow(row) for row in get_rows()), content_type="text/csv"
        )
        response['Content-Disposition'] = 'attachment; filename="somefilename.csv"'
        return response
||||
|
||||
|
||||
class Entreprise(models.Model):
    # One consolidated row of the FSN CSV files attached to DS dossiers.
    # Field names and order mirror COLUMNS_KEYNAMES (the CSV column layout);
    # rows are upserted per (resource, sequence) — see unique_together.

    class Meta:
        unique_together = (('resource', 'sequence'), ('resource', 'siren'),)

    resource = models.ForeignKey(FSNReunionConnector, on_delete=models.CASCADE)
    code_application = models.CharField(max_length=20)
    sequence = models.CharField(max_length=16)
    periode = models.CharField(max_length=35)
    siren = models.CharField(max_length=11)
    nom1 = models.CharField(max_length=35, blank=True)
    nom2 = models.CharField(max_length=35, blank=True)
    nb_salarie = models.CharField(max_length=2)
    rue = models.CharField(max_length=35, blank=True)
    boite_postale = models.CharField(max_length=10, blank=True)
    code_postale = models.CharField(max_length=10, blank=True)
    ville = models.CharField(max_length=35, blank=True)
    code_pays = models.CharField(max_length=2)
    region = models.CharField(max_length=3)
    iban = models.CharField(max_length=34)
    montant = models.FloatField()
    devise = models.CharField(max_length=3)
    nom_demandeur = models.CharField(max_length=35, blank=True)
    prenom_demandeur = models.CharField(max_length=35, blank=True)
    qualite = models.CharField(max_length=35, blank=True)
    tel = models.CharField(max_length=30, blank=True)
    courriel = models.CharField(max_length=241, blank=True)
    # refreshed on every save()
    last_update_datetime = models.DateTimeField(auto_now=True)
|
||||
|
||||
|
||||
class DSDossier(models.Model):
    # Local copy of a DS dossier and of its attached CSV file.

    class Meta:
        unique_together = (('resource', 'ds_id'),)

    resource = models.ForeignKey(
        FSNReunionConnector, on_delete=models.CASCADE, related_name='dossiers'
    )
    # identifier and state as reported by the DS API
    ds_id = models.CharField(max_length=256)
    ds_state = models.CharField(max_length=256)
    # downloaded CSV attachment, stored under fsn_reunion/<ds_id>/
    csv_file = models.FileField(upload_to=csv_file_location)
    csv_filename = models.CharField(max_length=256)
    last_update_datetime = models.DateTimeField(auto_now=True)

    def to_json(self, request):
        """Serialize the dossier, including an absolute URL to the
        connector's get-dossier-file endpoint for the stored CSV."""
        csv_file_url = request.build_absolute_uri(
            reverse(
                'generic-endpoint',
                kwargs={
                    'connector': self.resource.get_connector_slug(),
                    'slug': self.resource.slug,
                    'endpoint': 'get-dossier-file'
                }
            )
        ) + '/%s/' % self.id

        return {
            'id': str(self.pk),
            'ds_id': self.ds_id,
            'ds_state': self.ds_state,
            'csv_filename': self.csv_filename,
            'csv_file': csv_file_url,
            'last_update_datetime': self.last_update_datetime
        }

    def get_content_without_bom(self):
        """Return the stored file content as utf-8 text, any BOM stripped
        (utf-8-sig decodes and drops a leading BOM; errors are ignored)."""
        self.csv_file.seek(0)
        content = self.csv_file.read()
        return force_str(content.decode('utf-8-sig', 'ignore').encode('utf-8'))

    def get_rows(self):
        """Parse the stored ';'-separated CSV into a list of dicts keyed by
        COLUMNS_KEYNAMES; cells missing from short rows map to None."""
        content = self.get_content_without_bom()
        reader = csv.reader(content.splitlines(), delimiter=';')
        rows = list(reader)

        if not rows:
            return []

        rows = [[smart_text(x) for x in y] for y in rows if y]
        # COLUMNS_KEYNAMES entries are all non-empty, so titles, indexes and
        # caption reduce to the identity mapping over the column names.
        titles = [t.strip() for t in COLUMNS_KEYNAMES]
        indexes = [titles.index(t) for t in titles if t]
        caption = [titles[i] for i in indexes]

        def get_cell(row, index):
            # a row shorter than the expected column count yields None cells
            try:
                return row[index]
            except IndexError:
                return None

        return [{caption: get_cell(row, index) for caption, index in zip(caption, indexes)} for row in rows]
|
|
@ -0,0 +1,49 @@
|
|||
#! /usr/bin/env python
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
from distutils.command.sdist import sdist
|
||||
|
||||
|
||||
def get_version():
    """Return the package version string.

    Resolution order:
      1. the content of a VERSION file (written by eo_sdist into tarballs);
      2. `git describe --dirty --match=v*`, with the leading 'v' stripped
         and '-' mapped to '.';
      3. the literal '0' as a last resort.
    """
    if os.path.exists('VERSION'):
        # context manager ensures the handle is closed even on error
        with open('VERSION', 'r') as version_file:
            return version_file.read()
    if os.path.exists('.git'):
        p = subprocess.Popen(['git', 'describe', '--dirty', '--match=v*'], stdout=subprocess.PIPE)
        result = p.communicate()[0]
        if p.returncode == 0:
            version = result.split()[0][1:]
            if not isinstance(version, str):
                # Python 3: communicate() returns bytes; the original
                # str(bytes) produced a bogus "b'...'" version string
                version = version.decode()
            return version.replace('-', '.')
    return '0'
|
||||
|
||||
|
||||
class eo_sdist(sdist):
    """sdist command that materializes a VERSION file for the duration of
    the build (so tarballs carry the git-derived version), then removes it.
    """

    def run(self):
        # drop any stale VERSION so get_version() reads git, not the file
        if os.path.exists('VERSION'):
            os.remove('VERSION')
        # resolve the version BEFORE creating the file: opening 'VERSION'
        # for writing first would make get_version() read an empty file
        version = get_version()
        with open('VERSION', 'w') as version_file:
            version_file.write(version)
        sdist.run(self)
        if os.path.exists('VERSION'):
            os.remove('VERSION')
|
||||
|
||||
|
||||
setup(
    name='passerelle-reunion-fsn',
    version=get_version(),
    author='Emmanuel Cazenave',
    author_email='ecazenave@entrouvert.com',
    packages=find_packages(),
    # eo_sdist embeds a VERSION file into source distributions
    cmdclass={
        'sdist': eo_sdist,
    }
)
|
|
@ -0,0 +1,14 @@
|
|||
import os
|
||||
|
||||
|
||||
# Test settings fragment loaded by passerelle (PASSERELLE_SETTINGS_FILE,
# see tox.ini): register this connector app on top of the base settings.
INSTALLED_APPS += ('passerelle_reunion_fsn',)


DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'TEST': {
            # one test database per CI branch; the branch part is slug-ified
            # ('/' -> '-') and truncated to 63 characters
            'NAME': 'passerelle-reunion-fsn-test-%s' % os.environ.get("BRANCH_NAME", "").replace('/', '-')[:63],
        }
    }
}
|
|
@ -0,0 +1,36 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.cache import cache
|
||||
import django_webtest
|
||||
import pytest
|
||||
|
||||
from passerelle_reunion_fsn.models import FSNReunionConnector
|
||||
from passerelle.base.models import ApiUser, AccessRight
|
||||
|
||||
|
||||
@pytest.fixture
def app(request):
    # WebTest application fixture: django-webtest's settings patches are
    # applied for the duration of the test and reverted afterwards; the
    # cache is cleared so tests do not leak state into each other.
    wtm = django_webtest.WebTestMixin()
    wtm._patch_settings()
    cache.clear()
    yield django_webtest.DjangoTestApp()
    wtm._unpatch_settings()
|
||||
|
||||
|
||||
@pytest.fixture
def connector(db):
    # An FSNReunionConnector instance plus an open ('all') API user granted
    # the 'can_access' right on it, so endpoint calls are authorized.
    connector = FSNReunionConnector.objects.create(
        slug='test', api_url='https://whatever', token='token', demarche_number=1,
        instructeur_id='xxxx'
    )
    api = ApiUser.objects.create(username='all', keytype='', key='')
    obj_type = ContentType.objects.get_for_model(connector)
    AccessRight.objects.create(
        codename='can_access', apiuser=api,
        resource_type=obj_type, resource_pk=connector.pk)
|
||||
|
||||
|
||||
def test_dummny(app, connector):
    # Smoke test: only checks that the app and connector fixtures build.
    # NOTE(review): 'dummny' looks like a typo for 'dummy' — renaming would
    # change the collected test id, so it is only flagged here.
    assert True
|
|
@ -0,0 +1,20 @@
|
|||
[tox]
|
||||
toxworkdir = {env:TMPDIR:/tmp}/tox-{env:USER}/passerelle-reunion-fsn/{env:BRANCH_NAME:}
|
||||
envlist = py2-django111
|
||||
|
||||
[testenv]
|
||||
usedevelop = True
|
||||
basepython = python2
|
||||
setenv =
|
||||
DJANGO_SETTINGS_MODULE=passerelle.settings
|
||||
PASSERELLE_SETTINGS_FILE=tests/settings.py
|
||||
deps =
|
||||
django111: django>=1.11,<1.12
|
||||
git+http://git.entrouvert.org/passerelle.git
|
||||
django-webtest
|
||||
psycopg2-binary
|
||||
pytest
|
||||
pytest-django
|
||||
xmlschema<1.1
|
||||
commands =
|
||||
django111: py.test {posargs: --junitxml=junit-{envname}.xml tests/}
|
Reference in New Issue