From bd064c12965f53403a2b1b7b364538f4a502d34d Mon Sep 17 00:00:00 2001 From: Valentin Deniaud Date: Wed, 1 Mar 2023 14:27:18 +0100 Subject: [PATCH] misc: apply pre-commit-hooks (#74976) --- debian/control | 11 +- .../migrations/0001_initial.py | 54 ++- .../0002_entreprise_missing_column.py | 2 - .../migrations/0003_csv_checksum.py | 2 - .../migrations/0004_wcs_batches.py | 37 +- .../migrations/0005_text_to_jsonb.py | 4 - .../migrations/0006_auto_20200519_1158.py | 4 +- .../migrations/0007_entreprise_code_ape.py | 2 - passerelle_reunion_fsn/models.py | 362 ++++++++---------- setup.py | 15 +- tests/settings.py | 6 +- tests/test_connector.py | 16 +- 12 files changed, 249 insertions(+), 266 deletions(-) diff --git a/debian/control b/debian/control index 74589a0..36bc3ae 100644 --- a/debian/control +++ b/debian/control @@ -2,13 +2,14 @@ Source: passerelle-reunion-fsn Maintainer: Emmanuel Cazenave Section: python Priority: optional -Build-Depends: python3-setuptools, python3-all, debhelper-compat (= 12), dh-python, python3-django +Build-Depends: debhelper-compat (= 12), + dh-python, + python3-all, + python3-django, + python3-setuptools, Standards-Version: 3.9.1 Package: python3-passerelle-reunion-fsn Architecture: all -Depends: ${misc:Depends}, - ${python3:Depends}, - python3-unicodecsv +Depends: python3-unicodecsv, ${misc:Depends}, ${python3:Depends} Description: Passerelle CR Reunion FSN (Python 3) - diff --git a/passerelle_reunion_fsn/migrations/0001_initial.py b/passerelle_reunion_fsn/migrations/0001_initial.py index 340a5aa..615ecdf 100644 --- a/passerelle_reunion_fsn/migrations/0001_initial.py +++ b/passerelle_reunion_fsn/migrations/0001_initial.py @@ -1,9 +1,8 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-04-16 12:12 -from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models + import passerelle_reunion_fsn.models @@ -19,7 +18,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='DSDossier', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('ds_id', models.CharField(max_length=256)), ('ds_state', models.CharField(max_length=256)), ('csv_file', models.FileField(upload_to=passerelle_reunion_fsn.models.csv_file_location)), @@ -30,7 +32,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Entreprise', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('code_application', models.CharField(max_length=20)), ('sequence', models.CharField(max_length=16)), ('periode', models.CharField(max_length=35)), @@ -58,15 +63,34 @@ class Migration(migrations.Migration): migrations.CreateModel( name='FSNReunionConnector', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('title', models.CharField(max_length=50, verbose_name='Title')), ('slug', models.SlugField(unique=True, verbose_name='Identifier')), ('description', models.TextField(verbose_name='Description')), ('api_url', models.URLField(max_length=400, verbose_name='DS API URL')), ('token', 
models.CharField(max_length=256, verbose_name='DS token')), ('demarche_number', models.IntegerField(verbose_name='Demarche number')), - ('instructeur_id', models.CharField(blank=True, help_text='Region identifier for this case', max_length=256, verbose_name='Instructeur identifier')), - ('users', models.ManyToManyField(blank=True, related_name='_fsnreunionconnector_users_+', related_query_name='+', to='base.ApiUser')), + ( + 'instructeur_id', + models.CharField( + blank=True, + help_text='Region identifier for this case', + max_length=256, + verbose_name='Instructeur identifier', + ), + ), + ( + 'users', + models.ManyToManyField( + blank=True, + related_name='_fsnreunionconnector_users_+', + related_query_name='+', + to='base.ApiUser', + ), + ), ], options={ 'verbose_name': 'FSN Reunion', @@ -75,19 +99,25 @@ class Migration(migrations.Migration): migrations.AddField( model_name='entreprise', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='passerelle_reunion_fsn.FSNReunionConnector'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to='passerelle_reunion_fsn.FSNReunionConnector' + ), ), migrations.AddField( model_name='dsdossier', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dossiers', to='passerelle_reunion_fsn.FSNReunionConnector'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='dossiers', + to='passerelle_reunion_fsn.FSNReunionConnector', + ), ), migrations.AlterUniqueTogether( name='entreprise', - unique_together=set([('resource', 'siren'), ('resource', 'sequence')]), + unique_together={('resource', 'siren'), ('resource', 'sequence')}, ), migrations.AlterUniqueTogether( name='dsdossier', - unique_together=set([('resource', 'ds_id')]), + unique_together={('resource', 'ds_id')}, ), ] diff --git a/passerelle_reunion_fsn/migrations/0002_entreprise_missing_column.py b/passerelle_reunion_fsn/migrations/0002_entreprise_missing_column.py index 0cffb53..8d1cc33 100644 --- a/passerelle_reunion_fsn/migrations/0002_entreprise_missing_column.py +++ b/passerelle_reunion_fsn/migrations/0002_entreprise_missing_column.py @@ -1,6 +1,4 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-04-17 14:52 -from __future__ import unicode_literals from django.db import migrations, models diff --git a/passerelle_reunion_fsn/migrations/0003_csv_checksum.py b/passerelle_reunion_fsn/migrations/0003_csv_checksum.py index b29b030..5a61691 100644 --- a/passerelle_reunion_fsn/migrations/0003_csv_checksum.py +++ b/passerelle_reunion_fsn/migrations/0003_csv_checksum.py @@ -1,6 +1,4 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-04-19 05:45 -from __future__ import unicode_literals from django.db import migrations, models diff --git a/passerelle_reunion_fsn/migrations/0004_wcs_batches.py b/passerelle_reunion_fsn/migrations/0004_wcs_batches.py index f493f51..e45f202 100644 --- a/passerelle_reunion_fsn/migrations/0004_wcs_batches.py +++ b/passerelle_reunion_fsn/migrations/0004_wcs_batches.py @@ -1,10 +1,9 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.18 on 2020-04-24 15:37 -from __future__ import unicode_literals -from django.db import migrations, models import django.contrib.postgres.fields.jsonb import django.db.models.deletion +from django.db import migrations, models + import passerelle_reunion_fsn.models @@ -18,7 +17,10 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Batch', fields=[ - 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), ('batch_date', models.DateField()), ('last_update_datetime', models.DateTimeField(auto_now=True)), ], @@ -29,12 +31,25 @@ class Migration(migrations.Migration): migrations.CreateModel( name='BatchFile', fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('csv_file', models.FileField(upload_to=passerelle_reunion_fsn.models.batch_csv_file_location)), + ( + 'id', + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), + ), + ( + 'csv_file', + models.FileField(upload_to=passerelle_reunion_fsn.models.batch_csv_file_location), + ), ('csv_filename', models.CharField(max_length=256)), ('ready', models.BooleanField(default=False)), ('last_update_datetime', models.DateTimeField(auto_now=True)), - ('batch', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='passerelle_reunion_fsn.Batch')), + ( + 'batch', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='files', + to='passerelle_reunion_fsn.Batch', + ), + ), ], ), migrations.AddField( @@ -50,10 +65,14 @@ class Migration(migrations.Migration): migrations.AddField( model_name='batch', name='resource', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='batches', to='passerelle_reunion_fsn.FSNReunionConnector'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name='batches', + to='passerelle_reunion_fsn.FSNReunionConnector', + ), ), migrations.AlterUniqueTogether( name='batch', - unique_together=set([('resource', 'batch_date')]), + unique_together={('resource', 'batch_date')}, ), ] diff --git a/passerelle_reunion_fsn/migrations/0005_text_to_jsonb.py b/passerelle_reunion_fsn/migrations/0005_text_to_jsonb.py index f275271..eb761ed 100644 --- a/passerelle_reunion_fsn/migrations/0005_text_to_jsonb.py +++ b/passerelle_reunion_fsn/migrations/0005_text_to_jsonb.py @@ -1,8 +1,4 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - from django.db import migrations - from passerelle.utils.db import EnsureJsonbType diff --git a/passerelle_reunion_fsn/migrations/0006_auto_20200519_1158.py b/passerelle_reunion_fsn/migrations/0006_auto_20200519_1158.py index b1a20ba..4f0073b 100644 --- a/passerelle_reunion_fsn/migrations/0006_auto_20200519_1158.py +++ b/passerelle_reunion_fsn/migrations/0006_auto_20200519_1158.py @@ -1,6 +1,4 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.18 on 2020-05-19 09:58 -from __future__ import unicode_literals from django.db import migrations @@ -14,6 +12,6 @@ class Migration(migrations.Migration): operations = [ migrations.AlterUniqueTogether( name='entreprise', - unique_together=set([('resource', 'sequence'), ('resource', 'periode', 'siren')]), + unique_together={('resource', 'sequence'), ('resource', 'periode', 'siren')}, ), ] diff --git a/passerelle_reunion_fsn/migrations/0007_entreprise_code_ape.py b/passerelle_reunion_fsn/migrations/0007_entreprise_code_ape.py index f027d2a..aae642f 100644 --- a/passerelle_reunion_fsn/migrations/0007_entreprise_code_ape.py +++ b/passerelle_reunion_fsn/migrations/0007_entreprise_code_ape.py @@ -1,6 +1,4 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.18 on 2020-05-29 23:37 -from __future__ import unicode_literals from django.db import 
migrations, models diff --git a/passerelle_reunion_fsn/models.py b/passerelle_reunion_fsn/models.py index 8a8e975..27ffa30 100644 --- a/passerelle_reunion_fsn/models.py +++ b/passerelle_reunion_fsn/models.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # passerelle-reunion-fsn # Copyright (C) 2020 Entr'ouvert # @@ -17,19 +15,20 @@ # along with this program. If not, see . import base64 -from datetime import datetime +import csv import hashlib -from io import BytesIO import os import os.path import tempfile +from datetime import datetime +from io import BytesIO from django.conf import settings from django.contrib.postgres.fields import JSONField from django.core.files import File -from django.urls import reverse from django.db import models, transaction from django.http import FileResponse, HttpResponse +from django.urls import reverse from django.utils import dateformat, six from django.utils.dateparse import parse_date, parse_datetime from django.utils.encoding import force_str, force_text, smart_text @@ -40,12 +39,6 @@ from passerelle.base.signature import sign_url from passerelle.utils.api import endpoint from passerelle.utils.jsonresponse import APIError -if six.PY3: - import csv -else: - import unicodecsv as csv - - GENERATE_BATCH_SCHEMA = { "$schema": "http://json-schema.org/draft-04/schema#", "title": "Generate batch", @@ -59,8 +52,8 @@ GENERATE_BATCH_SCHEMA = { "force": { "description": "Force", "type": "boolean", - } - } + }, + }, } @@ -103,7 +96,8 @@ def batch_csv_file_location(instance, filename): return 'fsn_reunion/%s/batch/%s/%s/%s' % ( instance.batch.resource.id, dateformat.format(instance.batch.batch_date, 'Y-m-d'), - instance.id, filename + instance.id, + filename, ) @@ -118,8 +112,10 @@ class FSNReunionConnector(BaseResource): token = models.CharField(max_length=256, verbose_name=_('DS token')) demarche_number = models.IntegerField(verbose_name=_('Demarche number')) instructeur_id = models.CharField( - max_length=256, blank=True, verbose_name=_('Instructeur identifier'), - help_text=_('Region identifier for this case') + max_length=256, + blank=True, + verbose_name=_('Instructeur identifier'), + help_text=_('Region identifier for this case'), ) wcs_form_slug = models.CharField(max_length=256, blank=True, verbose_name=_('WCS form slug')) wcs_options = JSONField(null=True, blank=True) @@ -136,13 +132,8 @@ class FSNReunionConnector(BaseResource): self.logger.info('end generate batch') def _ds_call(self, query, variables): - headers = { - 'Authorization': 'Bearer token=' + self.token - } - data = { - 'query': query, - 'variables': variables - } + headers = {'Authorization': 'Bearer token=' + self.token} + data = {'query': query, 'variables': variables} response = self.requests.post(self.api_url, headers=headers, json=data) if response.status_code != 200: raise APIError('An error occured, status code : %s' % response.status_code) @@ -160,7 +151,7 @@ class FSNReunionConnector(BaseResource): def _wcs_call(self, filters={}): if not getattr(settings, 'KNOWN_SERVICES', {}).get('wcs'): - raise APIError('No wcs found') + raise APIError('No wcs found') wcs_service = list(settings.KNOWN_SERVICES['wcs'].values())[0] if self.wcs_options and 'instance' in self.wcs_options: @@ -169,12 +160,7 @@ class FSNReunionConnector(BaseResource): orig = wcs_service.get('orig') secret = wcs_service.get('secret') limit = 10 - params = { - 'orig': orig, - 'full': 'on', - 'limit': limit, - 'order_by': '-receipt_time' - } + params = {'orig': orig, 'full': 'on', 'limit': limit, 'order_by': '-receipt_time'} 
params.update(self.wcs_options.get('filters', {})) params.update(filters) @@ -184,11 +170,8 @@ class FSNReunionConnector(BaseResource): params['offset'] = offset query_string = urlparse.urlencode(params) api_url = sign_url( - urlparse.urljoin( - base_url, - 'api/forms/%s/list?%s' % (self.wcs_form_slug, query_string) - ), - key=secret + urlparse.urljoin(base_url, 'api/forms/%s/list?%s' % (self.wcs_form_slug, query_string)), + key=secret, ) response = self.requests.get(api_url) if response.status_code != 200: @@ -197,22 +180,18 @@ class FSNReunionConnector(BaseResource): if not data: has_data = False else: - for form in data: - yield form + yield from data offset += limit - @endpoint( - methods=['get'], perm='can_access', name='wcs-call', - description=_('Call wcs debug') - ) + @endpoint(methods=['get'], perm='can_access', name='wcs-call', description=_('Call wcs debug')) def wcs_call(self, request): - return { - 'data': [form for form in self._wcs_call()] - } + return {'data': [form for form in self._wcs_call()]} @endpoint( - methods=['get'], perm='can_access', name='dsproxy-get-instructeurs', - description=_('DS Proxy: get instructeurs') + methods=['get'], + perm='can_access', + name='dsproxy-get-instructeurs', + description=_('DS Proxy: get instructeurs'), ) def dsproxy_get_instructeurs(self, request): query = ''' @@ -225,15 +204,13 @@ query getInstructeurs($demarcheNumber: Int!){ } } ''' - variables = { - 'demarcheNumber': self.demarche_number - } - return { - 'data': self._ds_call(query, variables) - } + variables = {'demarcheNumber': self.demarche_number} + return {'data': self._ds_call(query, variables)} @endpoint( - methods=['get'], perm='can_access', name='dsproxy-get-liste-champs', + methods=['get'], + perm='can_access', + name='dsproxy-get-liste-champs', description=_('DS Proxy: get fields identifiers'), ) def dsproxy_get_liste_champs(self, request): @@ -249,12 +226,8 @@ query getChampDescriptors($demarcheNumber: Int!) 
{ } } ''' - variables = { - 'demarcheNumber': self.demarche_number - } - return { - 'data': self._ds_call(query, variables) - } + variables = {'demarcheNumber': self.demarche_number} + return {'data': self._ds_call(query, variables)} def _ds_get_dossiers(self): if not self.instructeur_id: @@ -328,10 +301,7 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: } } ''' - variables = { - 'demarcheNumber': self.demarche_number, - 'after': None - } + variables = {'demarcheNumber': self.demarche_number, 'after': None} dossiers = [] has_next_page = True while has_next_page: @@ -346,32 +316,26 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: def get_passage_en_construction(x): return parse_datetime(x['datePassageEnConstruction']) - return { - 'dossiers': sorted(dossiers, key=get_passage_en_construction), - 'num_dossiers': len(dossiers) - } + return {'dossiers': sorted(dossiers, key=get_passage_en_construction), 'num_dossiers': len(dossiers)} @endpoint( - methods=['get'], perm='can_access', name='dsproxy-get-dossiers', - description=_('DS Proxy: get dossiers') + methods=['get'], + perm='can_access', + name='dsproxy-get-dossiers', + description=_('DS Proxy: get dossiers'), ) def dsproxy_get_dossiers(self, request): - return { - 'data': self._ds_get_dossiers() - } + return {'data': self._ds_get_dossiers()} @endpoint( - methods=['post'], perm='can_access', name='fetch-dossiers', - description=_('Fetch dossiers from DS and consolidate into local data') + methods=['post'], + perm='can_access', + name='fetch-dossiers', + description=_('Fetch dossiers from DS and consolidate into local data'), ) def fetch_dossiers(self, request): dossiers = self._fetch_dossiers(request) - return { - 'data': { - 'dossiers': dossiers, - 'num_dossiers': len(dossiers) - } - } + return {'data': {'dossiers': dossiers, 'num_dossiers': len(dossiers)}} def _fetch_dossiers(self, request=None): res = [] @@ -398,8 +362,11 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: response = self.requests.get(file_url) assert response.status_code == 200 ds_dossier = DSDossier.objects.create( - resource=self, ds_id=id_dossier, csv_filename=filename, - ds_state=dossier['state'], csv_checksum=champ['file']['checksum'] + resource=self, + ds_id=id_dossier, + csv_filename=filename, + ds_state=dossier['state'], + csv_checksum=champ['file']['checksum'], ) ds_dossier.csv_file.save(filename, BytesIO(response.content)) @@ -430,30 +397,22 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: return res - @endpoint( - methods=['get'], perm='can_access', name='get-dossiers', - description=_('Get dossiers') - ) + @endpoint(methods=['get'], perm='can_access', name='get-dossiers', description=_('Get dossiers')) def get_dossiers(self, request): res = [] for dossier in self.dossiers.all(): res.append(dossier.to_json(request)) - return { - 'data': { - 'dossiers': res - } - } + return {'data': {'dossiers': res}} @endpoint( - methods=['get'], perm='can_access', example_pattern='{dossier_pk}/', - pattern='^(?P\w+)/$', name='get-dossier-file', - parameters={ - 'dossier_pk': { - 'description': _('Local dossier identifier'), - 'example_value': '2' - } - }, description=_('Get csv file from dossier') + methods=['get'], + perm='can_access', + example_pattern='{dossier_pk}/', + pattern=r'^(?P\w+)/$', + name='get-dossier-file', + parameters={'dossier_pk': {'description': _('Local dossier identifier'), 'example_value': '2'}}, + description=_('Get 
csv file from dossier'), ) def get_dossier_file(self, request, dossier_pk): try: @@ -466,15 +425,14 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: return response @endpoint( - methods=['get'], perm='can_access', name='get-csv', - description=_('Get consolidated data (csv file)') + methods=['get'], perm='can_access', name='get-csv', description=_('Get consolidated data (csv file)') ) def get_csv(self, request): - - class Echo(object): + class Echo: """An object that implements just the write method of the file-like interface. """ + def write(self, value): """Write the value by returning it, instead of storing in a buffer.""" return value @@ -486,22 +444,16 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: pseudo_buffer = Echo() writer = csv.writer(pseudo_buffer) - response = FileResponse( - (writer.writerow(row) for row in get_rows()), content_type="text/csv" - ) + response = FileResponse((writer.writerow(row) for row in get_rows()), content_type="text/csv") response['Content-Disposition'] = 'attachment; filename="somefilename.csv"' return response @endpoint( - methods=['get'], perm='can_access', name='get-data', - description=_('Get data by sequence or siren') + methods=['get'], perm='can_access', name='get-data', description=_('Get data by sequence or siren') ) def get_data(self, request, sequence=None, siren=None): - def build_result(entreprise): - return { - 'data': {attr: getattr(entreprise, attr) for attr in COLUMNS_KEYNAMES} - } + return {'data': {attr: getattr(entreprise, attr) for attr in COLUMNS_KEYNAMES}} if sequence is None and siren is None: raise APIError('Need sequence or siren') @@ -518,14 +470,9 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: if entreprise: return build_result(entreprise) - return { - 'data': {} - } + return {'data': {}} - @endpoint( - methods=['get'], perm='can_access', name='get-batches', - description=_('Get batches') - ) + @endpoint(methods=['get'], perm='can_access', name='get-batches', description=_('Get batches')) def get_batches(self, request): res = [] for batch in self.batches.all(): @@ -533,43 +480,37 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: { 'id': batch.pk, 'text': dateformat.format(batch.batch_date, 'l d F Y'), - 'url': request.build_absolute_uri(batch.get_absolute_url()) + 'url': request.build_absolute_uri(batch.get_absolute_url()), } ) return {'data': res} @endpoint( - methods=['get'], perm='can_access', name='batchfile', - pattern='^(?P\w+)/$', example_pattern='{batchfile_pk}/', - parameters={ - 'batchfile_pk': { - 'description': _('Batch file identifier'), - 'example_value': '2' - } - }, description=_('Get batch file') - + methods=['get'], + perm='can_access', + name='batchfile', + pattern=r'^(?P\w+)/$', + example_pattern='{batchfile_pk}/', + parameters={'batchfile_pk': {'description': _('Batch file identifier'), 'example_value': '2'}}, + description=_('Get batch file'), ) def batchfile(self, request, batchfile_pk): try: batch_file = BatchFile.objects.get(pk=batchfile_pk) except BatchFile.DoesNotExist: raise APIError('Unkwon batch file identifier') - response = HttpResponse( - batch_file.csv_file.read(), content_type="text/csv" - ) + response = HttpResponse(batch_file.csv_file.read(), content_type="text/csv") response['Content-Disposition'] = 'attachment; filename="%s"' % batch_file.csv_filename return response @endpoint( - methods=['get'], perm='can_access', name='batch', + methods=['get'], 
+ perm='can_access', + name='batch', description=_('Get batch'), - pattern='^(?P\w+)/$', example_pattern='{batch_pk}/', - parameters={ - 'batch_pk': { - 'description': _('Batch identifier'), - 'example_value': '2' - } - } + pattern=r'^(?P\w+)/$', + example_pattern='{batch_pk}/', + parameters={'batch_pk': {'description': _('Batch identifier'), 'example_value': '2'}}, ) def batch(self, request, batch_pk): try: @@ -593,20 +534,15 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: res['batch_files'] = batch_files res.update(batch.to_json()) res['url'] = request.build_absolute_uri(res['url']) - return { - 'data': res - } + return {'data': res} @endpoint( - perm='can_access', name='generate-batches', + perm='can_access', + name='generate-batches', post={ 'description': _('Generate batches'), - 'request_body': { - 'schema': { - 'application/json': GENERATE_BATCH_SCHEMA - } - } - } + 'request_body': {'schema': {'application/json': GENERATE_BATCH_SCHEMA}}, + }, ) def generate_batches(self, request, post_data): batch_date = post_data.get('batch_date') @@ -616,10 +552,7 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: data['url'] = request.build_absolute_uri(data['url']) return data - return { - 'data': [abs_url(batch.to_json()) for batch in self._generate_batches(batch_date, force)] - - } + return {'data': [abs_url(batch.to_json()) for batch in self._generate_batches(batch_date, force)]} @transaction.atomic def _generate_batches(self, batch_date=None, force=False): @@ -645,12 +578,8 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: def add_target_batch(refs, target): fd, temp_file_name = tempfile.mkstemp() os.close(fd) - if six.PY3: - f = open(temp_file_name, 'w', encoding='utf-8') - refs[target] = (csv.writer(f, delimiter=';'), f, temp_file_name) - else: - f = open(temp_file_name, 'wb') - refs[target] = (csv.writer(f, delimiter=';', encoding='utf-8'), f, temp_file_name) + f = open(temp_file_name, 'w', encoding='utf-8') + refs[target] = (csv.writer(f, delimiter=';'), f, temp_file_name) target_batches = {} now = datetime.now().date() @@ -686,7 +615,9 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: sequence = "%s-%s" % (code_region, form['display_id']) period = 'MARS-VOLET2' siren = form_fields['siren'] - nom1 = get_data(form_fields, 'nom_responsable') + ' ' + get_data(form_fields, 'prenom_responsable') + nom1 = ( + get_data(form_fields, 'nom_responsable') + ' ' + get_data(form_fields, 'prenom_responsable') + ) nom2 = '' nb_salaries = form_fields.get('nb_salaries', 0) rue = form_fields['numero_voie'] @@ -704,10 +635,28 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: write_row( target_batches[batch][0], [ - code_app, sequence, period, siren, nom1, nom2, nb_salaries, rue, - bp, cp, ville, code_pays, code_region, iban, montant, devise, nom_demandeur, - prenom_demandeur, qualite, tel, courriel - ] + code_app, + sequence, + period, + siren, + nom1, + nom2, + nb_salaries, + rue, + bp, + cp, + ville, + code_pays, + code_region, + iban, + montant, + devise, + nom_demandeur, + prenom_demandeur, + qualite, + tel, + courriel, + ], ) # create batch file objects @@ -725,9 +674,11 @@ query getDossiers($demarcheNumber: Int!, $createdSince: ISO8601DateTime, $first: class Entreprise(models.Model): - class Meta: - unique_together = (('resource', 'sequence'), ('resource', 'periode', 'siren'),) + unique_together = ( + ('resource', 'sequence'), + 
('resource', 'periode', 'siren'), + ) resource = models.ForeignKey(FSNReunionConnector, on_delete=models.CASCADE) code_application = models.CharField(max_length=20) @@ -761,13 +712,10 @@ class Entreprise(models.Model): class DSDossier(models.Model): - class Meta: unique_together = (('resource', 'ds_id'),) - resource = models.ForeignKey( - FSNReunionConnector, on_delete=models.CASCADE, related_name='dossiers' - ) + resource = models.ForeignKey(FSNReunionConnector, on_delete=models.CASCADE, related_name='dossiers') ds_id = models.CharField(max_length=256) ds_state = models.CharField(max_length=256) csv_file = models.FileField(upload_to=csv_file_location) @@ -776,14 +724,17 @@ class DSDossier(models.Model): last_update_datetime = models.DateTimeField(auto_now=True) def to_json(self, request=None): - csv_file_url = reverse( - 'generic-endpoint', - kwargs={ - 'connector': self.resource.get_connector_slug(), - 'slug': self.resource.slug, - 'endpoint': 'get-dossier-file' - } - ) + '/%s/' % self.id + csv_file_url = ( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': self.resource.get_connector_slug(), + 'slug': self.resource.slug, + 'endpoint': 'get-dossier-file', + }, + ) + + '/%s/' % self.id + ) if request is not None: csv_file_url = request.build_absolute_uri(csv_file_url) @@ -794,7 +745,7 @@ class DSDossier(models.Model): 'csv_filename': self.csv_filename, 'csv_file': csv_file_url, 'csv_checksum': self.csv_checksum, - 'last_update_datetime': self.last_update_datetime + 'last_update_datetime': self.last_update_datetime, } def get_content_without_bom(self): @@ -827,14 +778,11 @@ class DSDossier(models.Model): class Batch(models.Model): - class Meta: unique_together = (('resource', 'batch_date'),) ordering = ('batch_date',) - resource = models.ForeignKey( - FSNReunionConnector, on_delete=models.CASCADE, related_name='batches' - ) + resource = models.ForeignKey(FSNReunionConnector, on_delete=models.CASCADE, related_name='batches') batch_date = models.DateField() last_update_datetime = models.DateTimeField(auto_now=True) @@ -842,25 +790,26 @@ class Batch(models.Model): return { 'batch_date': self.batch_date, 'last_update_datetime': self.last_update_datetime, - 'url': self.get_absolute_url() + 'url': self.get_absolute_url(), } def get_absolute_url(self): - return reverse( - 'generic-endpoint', - kwargs={ - 'connector': self.resource.get_connector_slug(), - 'slug': self.resource.slug, - 'endpoint': 'batch' - } - ) + '/%s/' % self.pk + return ( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': self.resource.get_connector_slug(), + 'slug': self.resource.slug, + 'endpoint': 'batch', + }, + ) + + '/%s/' % self.pk + ) class BatchFile(models.Model): - batch = models.ForeignKey( - Batch, on_delete=models.CASCADE, related_name='files' - ) + batch = models.ForeignKey(Batch, on_delete=models.CASCADE, related_name='files') csv_file = models.FileField(upload_to=batch_csv_file_location) csv_filename = models.CharField(max_length=256) ready = models.BooleanField(default=False) @@ -871,15 +820,18 @@ class BatchFile(models.Model): 'csv_filename': self.csv_filename, 'ready': self.ready, 'last_update_datetime': self.last_update_datetime, - 'url': self.get_absolute_url() + 'url': self.get_absolute_url(), } def get_absolute_url(self): - return reverse( - 'generic-endpoint', - kwargs={ - 'connector': self.batch.resource.get_connector_slug(), - 'slug': self.batch.resource.slug, - 'endpoint': 'batchfile' - } - ) + '/%s/' % self.pk + return ( + reverse( + 'generic-endpoint', + kwargs={ + 'connector': 
self.batch.resource.get_connector_slug(), + 'slug': self.batch.resource.slug, + 'endpoint': 'batchfile', + }, + ) + + '/%s/' % self.pk + ) diff --git a/setup.py b/setup.py index 029ca45..a53a215 100644 --- a/setup.py +++ b/setup.py @@ -2,13 +2,12 @@ import os import subprocess - -from setuptools import setup, find_packages from distutils.command.sdist import sdist +from setuptools import find_packages, setup + class eo_sdist(sdist): - def run(self): if os.path.exists('VERSION'): os.remove('VERSION') @@ -23,10 +22,10 @@ class eo_sdist(sdist): def get_version(): '''Use the VERSION, if absent generates a version with git describe, if not - tag exists, take 0.0- and add the length of the commit log. + tag exists, take 0.0- and add the length of the commit log. ''' if os.path.exists('VERSION'): - with open('VERSION', 'r') as v: + with open('VERSION') as v: return v.read() if os.path.exists('.git'): p = subprocess.Popen( @@ -54,10 +53,8 @@ setup( author='Emmanuel Cazenave', author_email='ecazenave@entrouvert.com', packages=find_packages(), - install_requires=[ - 'unicodecsv' - ], + install_requires=['unicodecsv'], cmdclass={ 'sdist': eo_sdist, - } + }, ) diff --git a/tests/settings.py b/tests/settings.py index cf8be69..7c3fa21 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -1,6 +1,5 @@ import os - INSTALLED_APPS += ('passerelle_reunion_fsn',) @@ -8,7 +7,8 @@ DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'TEST': { - 'NAME': 'passerelle-reunion-fsn-test-%s' % os.environ.get("BRANCH_NAME", "").replace('/', '-')[:63], - } + 'NAME': 'passerelle-reunion-fsn-test-%s' + % os.environ.get("BRANCH_NAME", "").replace('/', '-')[:63], + }, } } diff --git a/tests/test_connector.py b/tests/test_connector.py index 6979ae0..8fff566 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1,13 +1,10 @@ -# -*- coding: utf-8 -*- - - -from django.contrib.contenttypes.models import ContentType -from django.core.cache import cache import django_webtest import pytest +from django.contrib.contenttypes.models import ContentType +from django.core.cache import cache +from passerelle.base.models import AccessRight, ApiUser from passerelle_reunion_fsn.models import FSNReunionConnector -from passerelle.base.models import ApiUser, AccessRight @pytest.fixture @@ -22,14 +19,13 @@ def app(request): @pytest.fixture def connector(db): connector = FSNReunionConnector.objects.create( - slug='test', api_url='https://whatever', token='token', demarche_number=1, - instructeur_id='xxxx' + slug='test', api_url='https://whatever', token='token', demarche_number=1, instructeur_id='xxxx' ) api = ApiUser.objects.create(username='all', keytype='', key='') obj_type = ContentType.objects.get_for_model(connector) AccessRight.objects.create( - codename='can_access', apiuser=api, - resource_type=obj_type, resource_pk=connector.pk) + codename='can_access', apiuser=api, resource_type=obj_type, resource_pk=connector.pk + ) def test_dummny(app, connector):
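
The hook configuration itself is not part of this patch. Judging from the changes (coding cookies and __future__ imports dropped, set([...]) rewritten as set literals, imports regrouped, long calls wrapped with trailing commas), the hooks are most likely pyupgrade, isort and a black-style formatter, but that is an assumption. Assuming the checkout carries a .pre-commit-config.yaml, the same reformatting can be reproduced locally with the standard pre-commit CLI:

    pip install pre-commit        # one-off
    pre-commit install            # run the hooks on every subsequent commit
    pre-commit run --all-files    # reformat the whole tree, as done in this patch
                                  # (assumes a .pre-commit-config.yaml at the repository root)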