trivial: apply black (#49820)

Frédéric Péters 2021-02-20 16:26:01 +01:00
parent 4540043a25
commit 3d9df1e526
412 changed files with 17549 additions and 11696 deletions
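
This commit applies the black code formatter across the tree; no behaviour change is intended. As a rough sketch of how such a pass is produced (the exact options are not recorded in this diff, and the line length below is an assumption inferred from the rewrapped lines, which exceed black's default of 88 characters), black can be driven from Python:

import black

# One of the calls below, written the way the pre-black code looked.
src = (
    "parser.addoption(\n"
    '    "--preserve-tree", action="store_true", default=False, help="Preserve test directory")\n'
)

# line_length=110 is an assumption; black's default is 88.
mode = black.FileMode(line_length=110)
print(black.format_str(src, mode=mode))

On the command line the equivalent is running black --line-length 110 . at the repository root; adding --check --diff to the same command reports the files that would still be rewritten without modifying them.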


@@ -9,6 +9,6 @@ STATIC_ROOT = 'collected-static'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'passerelle.sqlite3',
'NAME': 'passerelle.sqlite3',
}
}

debian/settings.py

@@ -12,12 +12,12 @@
# This file is sourced by "execfile" from /usr/lib/passerelle/debian_config.py
# SECURITY WARNING: don't run with debug turned on in production!
#DEBUG = False
# DEBUG = False
#ADMINS = (
# ADMINS = (
# ('User 1', 'poulpe@example.org'),
# ('User 2', 'janitor@example.net'),
#)
# )
# ALLOWED_HOSTS must be correct in production!
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
@@ -29,26 +29,26 @@ ALLOWED_HOSTS = ['*']
# Database
# Warning: don't change ENGINE, it must be 'tenant_schemas.postgresql_backend'
#DATABASES['default']['NAME'] = 'passerelle'
#DATABASES['default']['USER'] = 'passerelle'
#DATABASES['default']['PASSWORD'] = '******'
#DATABASES['default']['HOST'] = 'localhost'
#DATABASES['default']['PORT'] = '5432'
# DATABASES['default']['NAME'] = 'passerelle'
# DATABASES['default']['USER'] = 'passerelle'
# DATABASES['default']['PASSWORD'] = '******'
# DATABASES['default']['HOST'] = 'localhost'
# DATABASES['default']['PORT'] = '5432'
LANGUAGE_CODE = 'fr-fr'
TIME_ZONE = 'Europe/Paris'
# Email configuration
#EMAIL_SUBJECT_PREFIX = '[passerelle] '
#SERVER_EMAIL = 'root@passerelle.example.org'
#DEFAULT_FROM_EMAIL = 'webmaster@passerelle.example.org'
# EMAIL_SUBJECT_PREFIX = '[passerelle] '
# SERVER_EMAIL = 'root@passerelle.example.org'
# DEFAULT_FROM_EMAIL = 'webmaster@passerelle.example.org'
# SMTP configuration
#EMAIL_HOST = 'localhost'
#EMAIL_HOST_USER = ''
#EMAIL_HOST_PASSWORD = ''
#EMAIL_PORT = 25
# EMAIL_HOST = 'localhost'
# EMAIL_HOST_USER = ''
# EMAIL_HOST_PASSWORD = ''
# EMAIL_PORT = 25
# HTTPS
#CSRF_COOKIE_SECURE = True
#SESSION_COOKIE_SECURE = True
# CSRF_COOKIE_SECURE = True
# SESSION_COOKIE_SECURE = True


@@ -9,15 +9,16 @@ def pytest_addoption(parser):
parser.addoption("--cmis-endpoint", help="Url of a passerelle CMIS endpoint")
parser.addoption("--cmis-username", help="Username for the CMIS endpoint")
parser.addoption("--cmis-password", help="Password for the CMIS endpoint")
parser.addoption(
"--preserve-tree", action="store_true", default=False, help="Preserve test directory")
parser.addoption("--preserve-tree", action="store_true", default=False, help="Preserve test directory")
@pytest.fixture(scope='session')
def cmisclient(request):
return cmislib.CmisClient(
request.config.getoption("--cmis-endpoint"), request.config.getoption("--cmis-username"),
request.config.getoption("--cmis-password"))
request.config.getoption("--cmis-endpoint"),
request.config.getoption("--cmis-username"),
request.config.getoption("--cmis-password"),
)
@pytest.fixture(scope='session')


@@ -10,11 +10,16 @@ import requests
SPECIAL_CHARS = '!#$%&+-^_`;[]{}+='
@pytest.mark.parametrize("path,file_name", [
('', 'some.file'), ('/toto', 'some.file'), ('/toto/tata', 'some.file'),
('/toto', 'some.other'),
('/%s' % SPECIAL_CHARS, '%(spe)s.%(spe)s' % {'spe': SPECIAL_CHARS})
])
@pytest.mark.parametrize(
"path,file_name",
[
('', 'some.file'),
('/toto', 'some.file'),
('/toto/tata', 'some.file'),
('/toto', 'some.other'),
('/%s' % SPECIAL_CHARS, '%(spe)s.%(spe)s' % {'spe': SPECIAL_CHARS}),
],
)
def test_uploadfile(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, monkeypatch, path, file_name):
result_filename = 'result.file'
monkeypatch.chdir(tmpdir)
@@ -25,9 +30,12 @@ def test_uploadfile(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, monkeypatch
with orig_file.open('rb') as f:
file_b64_content = base64.b64encode(f.read())
response = requests.post(
url, json={"path": cmis_tmpdir + path,
"file": {"content": file_b64_content, "filename": file_name,
"content_type": "image/jpeg"}})
url,
json={
"path": cmis_tmpdir + path,
"file": {"content": file_b64_content, "filename": file_name, "content_type": "image/jpeg"},
},
)
assert response.status_code == 200
resp_data = response.json()
assert resp_data['err'] == 0
@@ -50,17 +58,23 @@ def test_uploadfile_conflict(cmisclient, cmis_connector, cmis_tmpdir, tmpdir, mo
url = urlparse.urljoin(cmis_connector, 'uploadfile')
file_b64_content = base64.b64encode('file_content')
response = requests.post(
url, json={"path": cmis_tmpdir + '/uploadconflict',
"file": {"content": file_b64_content, "filename": 'some.file',
"content_type": "image/jpeg"}})
url,
json={
"path": cmis_tmpdir + '/uploadconflict',
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
},
)
assert response.status_code == 200
resp_data = response.json()
assert resp_data['err'] == 0
file_b64_content = base64.b64encode('other_file_content')
response = requests.post(
url, json={"path": cmis_tmpdir + '/uploadconflict',
"file": {"content": file_b64_content, "filename": 'some.file',
"content_type": "image/jpeg"}})
url,
json={
"path": cmis_tmpdir + '/uploadconflict',
"file": {"content": file_b64_content, "filename": 'some.file', "content_type": "image/jpeg"},
},
)
assert response.status_code == 200
resp_data = response.json()
assert resp_data['err'] == 1


@@ -2,8 +2,7 @@ import pytest
def pytest_addoption(parser):
parser.addoption(
"--url", help="Url of a passerelle Planitech connector instance")
parser.addoption("--url", help="Url of a passerelle Planitech connector instance")
@pytest.fixture(scope='session')


@@ -7,10 +7,9 @@ import requests
def test_main(conn):
# get days
query_string = parse.urlencode({
'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00',
'display': 'date'
})
query_string = parse.urlencode(
{'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00', 'display': 'date'}
)
url = conn + '/getfreegaps?%s' % query_string
resp = requests.get(url)
resp.raise_for_status()
@@ -20,10 +19,9 @@ def test_main(conn):
assert data
# get places
query_string = parse.urlencode({
'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00',
'display': 'place'
})
query_string = parse.urlencode(
{'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00', 'display': 'place'}
)
url = conn + '/getfreegaps?%s' % query_string
resp = requests.get(url)
resp.raise_for_status()
@@ -34,10 +32,16 @@ def test_main(conn):
place = data[random.randint(0, len(data) - 1)]['id']
# get days on one place
query_string = parse.urlencode({
'start_days': 1, 'end_days': 90, 'start_time': '10:00', 'end_time': '11:00',
'place_id': place, 'display': 'date'
})
query_string = parse.urlencode(
{
'start_days': 1,
'end_days': 90,
'start_time': '10:00',
'end_time': '11:00',
'place_id': place,
'display': 'date',
}
)
url = conn + '/getfreegaps?%s' % query_string
resp = requests.get(url)
resp.raise_for_status()
@@ -55,10 +59,19 @@ def test_main(conn):
chosen_date = data[0]['id']
# create reservation
params = {
'date': chosen_date, 'start_time': '10:00', 'end_time': '11:00',
'place_id': place, 'price': 200, 'name_id': 'john-doe', 'type_id': resa_type_id,
'first_name': 'jon', 'last_name': 'doe', 'activity_id': activity_id,
'email': 'jon.doe@localhost', 'object': 'reservation object', 'vat_rate': 200
'date': chosen_date,
'start_time': '10:00',
'end_time': '11:00',
'place_id': place,
'price': 200,
'name_id': 'john-doe',
'type_id': resa_type_id,
'first_name': 'jon',
'last_name': 'doe',
'activity_id': activity_id,
'email': 'jon.doe@localhost',
'object': 'reservation object',
'vat_rate': 200,
}
print('Create reservation parameters \n')
pprint.pprint(params)
@@ -76,9 +89,7 @@ def test_main(conn):
reservation_id = data['reservation_id']
# confirm reservation
params = {
'reservation_id': reservation_id, 'status': 'standard'
}
params = {'reservation_id': reservation_id, 'status': 'standard'}
url = conn + '/updatereservation'
resp = requests.post(url, json=params)
resp.raise_for_status()


@@ -2,8 +2,7 @@ import pytest
def pytest_addoption(parser):
parser.addoption(
"--url", help="Url of a passerelle Toulouse Axel connector instance")
parser.addoption("--url", help="Url of a passerelle Toulouse Axel connector instance")
parser.addoption("--nameid", help="Publik Name ID")
parser.addoption("--firstname", help="first name of a user")
parser.addoption("--lastname", help="Last name of a user")


@@ -45,19 +45,58 @@ def test_link(conn, user):
payload['DROITALIMAGE'] = 'NON'
payload['REVENUS']['CHOIXREVENU'] = ''
# remove non editable fields
for key in ['SITUATIONFAMILIALE', 'SITUATIONFAMILIALE_label', 'NBENFANTACTIF', 'NBRLACTIF', 'IDDUI', 'CODEMISEAJOUR',
'management_dates', 'annee_reference', 'annee_reference_label', 'annee_reference_short']:
for key in [
'SITUATIONFAMILIALE',
'SITUATIONFAMILIALE_label',
'NBENFANTACTIF',
'NBRLACTIF',
'IDDUI',
'CODEMISEAJOUR',
'management_dates',
'annee_reference',
'annee_reference_label',
'annee_reference_short',
]:
payload.pop(key)
for key in ['IDPERSONNE', 'NOM', 'PRENOM', 'NOMJEUNEFILLE', 'DATENAISSANCE', 'CIVILITE', 'INDICATEURRL', 'CSP_label']:
for key in [
'IDPERSONNE',
'NOM',
'PRENOM',
'NOMJEUNEFILLE',
'DATENAISSANCE',
'CIVILITE',
'INDICATEURRL',
'CSP_label',
]:
if 'RL1' in payload:
payload['RL1'].pop(key)
if 'RL2' in payload:
payload['RL2'].pop(key)
for key in ['MONTANTTOTAL', 'DATEVALIDITE', 'SFI', 'IREVENUS', 'RNF', 'NBENFANTSACHARGE', 'TYPEREGIME_label']:
for key in [
'MONTANTTOTAL',
'DATEVALIDITE',
'SFI',
'IREVENUS',
'RNF',
'NBENFANTSACHARGE',
'TYPEREGIME_label',
]:
payload['REVENUS'].pop(key, None)
for enfant in payload['ENFANT']:
for key in ['id', 'text', 'NOM', 'DATENAISSANCE', 'SEXE', 'PRENOMPERE', 'PRENOMMERE', 'NOMPERE', 'NOMMERE', 'RATTACHEAUTREDUI', 'PRENOM',
'clae_cantine_current']:
for key in [
'id',
'text',
'NOM',
'DATENAISSANCE',
'SEXE',
'PRENOMPERE',
'PRENOMMERE',
'NOMPERE',
'NOMMERE',
'RATTACHEAUTREDUI',
'PRENOM',
'clae_cantine_current',
]:
enfant.pop(key)
enfant['AUTORISATIONURGENCEMEDICALE'] = 'OUI'
# manage contact fields
@@ -98,8 +137,10 @@ def test_link(conn, user):
# add partial update flags
flags = [
'maj:adresse',
'maj:rl1', 'maj:rl1_adresse_employeur',
'maj:rl2', 'maj:rl2_adresse_employeur',
'maj:rl1',
'maj:rl1_adresse_employeur',
'maj:rl2',
'maj:rl2_adresse_employeur',
'maj:revenus',
]
for i in range(0, 6):


@@ -2,8 +2,7 @@ import pytest
def pytest_addoption(parser):
parser.addoption(
"--url", help="Url of a passerelle Vivaticket connector instance")
parser.addoption("--url", help="Url of a passerelle Vivaticket connector instance")
@pytest.fixture(scope='session')


@@ -4,6 +4,7 @@ import datetime
import requests
import random
def call_generic(conn, endpoint):
print("%s \n" % endpoint)
url = conn + '/%s' % endpoint
@@ -21,17 +22,24 @@ def call_generic(conn, endpoint):
def test_get_events(conn):
call_generic(conn, 'events')
def test_get_rooms(conn):
call_generic(conn, 'rooms')
def test_get_themes(conn):
call_generic(conn, 'themes')
def test_book_event(conn):
url = conn + '/book'
payload = {'id': 'formid', 'email': 'foo@example.com',
'datetime': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M'),
'room': '001', 'theme': 'A0001', 'quantity': 1
payload = {
'id': 'formid',
'email': 'foo@example.com',
'datetime': datetime.datetime.now().strftime('%Y-%m-%dT%H:%M'),
'room': '001',
'theme': 'A0001',
'quantity': 1,
}
events = call_generic(conn, 'events')
random.shuffle(events)
@@ -42,7 +50,7 @@ def test_book_event(conn):
themes = call_generic(conn, 'themes')
random.shuffle(themes)
payload['theme'] = themes[0]['id']
print("Creating booking with the following payload:\n%s" % payload)
print("Creating booking with the following payload:\n%s" % payload)
resp = requests.post(url, json=payload)
resp.raise_for_status()
res = resp.json()


@@ -14,12 +14,38 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ActesWeb',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('description', models.TextField(verbose_name='Description')),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_actesweb_users_+', related_query_name='+', blank=True)),
(
'log_level',
models.CharField(
default=b'INFO',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_actesweb_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'verbose_name': "ActesWeb - Demande d'acte d'\xe9tat civil",


@@ -32,6 +32,8 @@ from passerelle.compat import json_loads
from passerelle.utils.api import endpoint
from passerelle.utils.jsonresponse import APIError
from passerelle.utils.conversion import ensure_encoding
@contextlib.contextmanager
def named_tempfile(*args, **kwargs):
with tempfile.NamedTemporaryFile(*args, **kwargs) as fp:
@@ -46,8 +48,7 @@ class ActesWeb(BaseResource):
@property
def basepath(self):
return os.path.join(
default_storage.path('actesweb'), self.slug)
return os.path.join(default_storage.path('actesweb'), self.slug)
@endpoint(perm='can_access', methods=['post'], description=_('Create demand'))
def create(self, request, *args, **kwargs):
@@ -88,6 +89,6 @@ class ActesWeb(BaseResource):
tempfile_name = tpf.name
os.rename(tempfile_name, filepath)
# set read only permission for owner and group
os.chmod(filepath, stat.S_IRUSR|stat.S_IRGRP|stat.S_IWGRP)
os.chmod(filepath, stat.S_IRUSR | stat.S_IRGRP | stat.S_IWGRP)
demand_id = '%s_%s' % (application_id, os.path.basename(filepath))
return {'data': {'demand_id': demand_id}}


@@ -14,12 +14,38 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='AirQuality',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_airquality_users_+', related_query_name='+', blank=True)),
(
'log_level',
models.CharField(
default=b'INFO',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_airquality_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'verbose_name': 'Air Quality',


@@ -28,24 +28,28 @@ from passerelle.utils.jsonresponse import APIError
class AirQuality(BaseResource):
category = _('Misc')
api_description = _(u'''
api_description = _(
u'''
This API provides a unique format for the air quality data of various places.
(But only supports the Rhône-Alpes region for now).
''')
'''
)
atmo_aura_api_token = models.CharField(max_length=100,
verbose_name=_('ATMO AURA API token'),
blank=True, null=True)
atmo_aura_api_token = models.CharField(
max_length=100, verbose_name=_('ATMO AURA API token'), blank=True, null=True
)
class Meta:
verbose_name = _('Air Quality')
@endpoint(pattern='^(?P<country>\w+)/(?P<city>\w+)/$',
example_pattern='{country}/{city}/',
parameters={
'country': {'description': _('Country Code'), 'example_value': 'fr'},
'city': {'description': _('City Name'), 'example_value': 'lyon'},
})
@endpoint(
pattern='^(?P<country>\w+)/(?P<city>\w+)/$',
example_pattern='{country}/{city}/',
parameters={
'country': {'description': _('Country Code'), 'example_value': 'fr'},
'city': {'description': _('City Name'), 'example_value': 'lyon'},
},
)
def details(self, request, country, city, **kwargs):
methods = {
('fr', 'albertville'): 'air_rhonealpes',
@@ -82,7 +86,8 @@ class AirQuality(BaseResource):
'vienne': '38544',
}
insee_code = insee_codes.get(city.lower())
response = self.requests.get('https://api.atmo-aura.fr/communes/%s/indices' % insee_code,
response = self.requests.get(
'https://api.atmo-aura.fr/communes/%s/indices' % insee_code,
params={'api_token': self.atmo_aura_api_token},
)
json_response = response.json()
@@ -106,12 +111,13 @@ class AirQuality(BaseResource):
break
if 'latest' in response_data:
comment_response = self.requests.get('https://api.atmo-aura.fr/commentaire',
params={
'date': response_data['latest']['date'],
'api_token': self.atmo_aura_api_token,
}
)
comment_response = self.requests.get(
'https://api.atmo-aura.fr/commentaire',
params={
'date': response_data['latest']['date'],
'api_token': self.atmo_aura_api_token,
},
)
if comment_response.ok:
response_data['comment'] = comment_response.json().get('commentaire')


@@ -17,13 +17,29 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='APIEntreprise',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('description', models.TextField(verbose_name='Description')),
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
('url', models.URLField(default=b'https://entreprise.api.gouv.fr/v2/', max_length=256, verbose_name='API URL')),
(
'url',
models.URLField(
default=b'https://entreprise.api.gouv.fr/v2/', max_length=256, verbose_name='API URL'
),
),
('token', models.CharField(max_length=1024, verbose_name='API token')),
('users', models.ManyToManyField(blank=True, related_name='_apientreprise_users_+', related_query_name='+', to='base.ApiUser')),
(
'users',
models.ManyToManyField(
blank=True,
related_name='_apientreprise_users_+',
related_query_name='+',
to='base.ApiUser',
),
),
],
options={
'verbose_name': 'API Entreprise',


@@ -15,8 +15,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='apientreprise',
name='recipient',
field=models.CharField(default='', max_length=1024, verbose_name='Recipient',
help_text='default value'
field=models.CharField(
default='', max_length=1024, verbose_name='Recipient', help_text='default value'
),
preserve_default=False,
),


@@ -69,7 +69,7 @@ def normalize_results(data):
if tstamp > 0:
try:
aware_date = make_aware(datetime.fromtimestamp(int(data[key])))
timestamp_to_datetime[key[:-len('timestamp')] + 'datetime'] = aware_date
timestamp_to_datetime[key[: -len('timestamp')] + 'datetime'] = aware_date
except (ValueError, TypeError):
pass
# add converted timestamps to initial data
@@ -81,8 +81,9 @@ class APIEntreprise(BaseResource):
url = models.URLField(_('API URL'), max_length=256, default='https://entreprise.api.gouv.fr/v2/')
token = models.CharField(max_length=1024, verbose_name=_('API token'))
recipient = models.CharField(max_length=1024, verbose_name=_('Recipient'), blank=False,
help_text=_('default value'))
recipient = models.CharField(
max_length=1024, verbose_name=_('Recipient'), blank=False, help_text=_('default value')
)
category = _('Business Process Connectors')
@@ -102,20 +103,20 @@ class APIEntreprise(BaseResource):
try:
response = self.requests.get(url, data=params, cache_duration=300)
except requests.RequestException as e:
raise APIError(u'API-entreprise connection error: %s' %
exception_to_text(e), data=[])
raise APIError(u'API-entreprise connection error: %s' % exception_to_text(e), data=[])
try:
data = response.json()
except ValueError as e:
content = response.text[:1000]
raise APIError(
u'API-entreprise returned non-JSON content with status %s: %s' %
(response.status_code, content),
u'API-entreprise returned non-JSON content with status %s: %s'
% (response.status_code, content),
data={
'status_code': response.status_code,
'exception': exception_to_text(e),
'content': content,
})
},
)
if response.status_code != 200:
if data.get('error') == 'not_found':
return {
@@ -123,12 +124,12 @@ class APIEntreprise(BaseResource):
'err_desc': data.get('message', 'not-found'),
}
raise APIError(
u'API-entreprise returned a non 200 status %s: %s' %
(response.status_code, data),
u'API-entreprise returned a non 200 status %s: %s' % (response.status_code, data),
data={
'status_code': response.status_code,
'content': data,
})
},
)
normalize_results(data)
return {
'err': 0,
@@ -138,13 +139,10 @@ class APIEntreprise(BaseResource):
# description of common endpoint parameters
ASSOCIATION_PARAM = {
'description': _('association SIREN or RNA/WALDEC number'),
'example_value': '44317013900036'
'example_value': '44317013900036',
}
CONTEXT_PARAM = {
'description': _('request context: MPS, APS...'),
'example_value': 'APS'
}
CONTEXT_PARAM = {'description': _('request context: MPS, APS...'), 'example_value': 'APS'}
MONTH_PARAM = {
'description': _('requested month'),
@@ -153,12 +151,12 @@ class APIEntreprise(BaseResource):
OBJECT_PARAM = {
'description': _('request object: form number, file identifier...'),
'example_value': '42'
'example_value': '42',
}
RECIPIENT_PARAM = {
'description': _('request recipient: usually customer number'),
'example_value': '44317013900036'
'example_value': '44317013900036',
}
SIREN_PARAM = {
@@ -166,26 +164,25 @@ class APIEntreprise(BaseResource):
'example_value': '443170139',
}
SIRET_PARAM = {
'description': _('firms SIRET number'),
'example_value': '44317013900036'
}
SIRET_PARAM = {'description': _('firms SIRET number'), 'example_value': '44317013900036'}
YEAR_PARAM = {
'description': _('requested year'),
'example_value': '2019',
}
@endpoint(perm='can_access',
pattern=r'(?P<association_id>\w+)/$',
example_pattern='{association_id}/',
description=_('Get association\'s documents'),
parameters={
'association_id': ASSOCIATION_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM
})
@endpoint(
perm='can_access',
pattern=r'(?P<association_id>\w+)/$',
example_pattern='{association_id}/',
description=_('Get association\'s documents'),
parameters={
'association_id': ASSOCIATION_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def documents_associations(self, request, association_id, **kwargs):
data = []
resp = self.get('documents_associations/%s/' % association_id, **kwargs)
@@ -193,19 +190,24 @@ class APIEntreprise(BaseResource):
# ignore documents with no type
if not item.get('type'):
continue
signature_elements = {'url': item['url'],
'context': kwargs['context'],
'object': kwargs['object'],
'recipient': kwargs['recipient']}
signature_elements = {
'url': item['url'],
'context': kwargs['context'],
'object': kwargs['object'],
'recipient': kwargs['recipient'],
}
signature = signing.dumps(signature_elements)
document_url = request.build_absolute_uri(
reverse('generic-endpoint',
kwargs={
'connector': self.get_connector_slug(),
'slug': self.slug,
'endpoint': 'document',
'rest': '%s/%s/' % (association_id, signature),
}))
reverse(
'generic-endpoint',
kwargs={
'connector': self.get_connector_slug(),
'slug': self.slug,
'endpoint': 'document',
'rest': '%s/%s/' % (association_id, signature),
},
)
)
item['id'] = item['timestamp']
item['text'] = item['type']
item['url'] = document_url
@@ -214,19 +216,21 @@ class APIEntreprise(BaseResource):
data.sort(key=lambda i: i['id'])
return {'err': 0, 'data': data}
@endpoint(pattern=r'(?P<association_id>\w+)/(?P<document_id>[\:\w-]+)/$',
example_pattern='{association_id}/{document_id}/',
description=_('Get association\'s document'),
parameters={
'association_id': ASSOCIATION_PARAM,
'document_id': {
'description': _('document id'),
'example_value': 'A1500660325',
},
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
pattern=r'(?P<association_id>\w+)/(?P<document_id>[\:\w-]+)/$',
example_pattern='{association_id}/{document_id}/',
description=_('Get association\'s document'),
parameters={
'association_id': ASSOCIATION_PARAM,
'document_id': {
'description': _('document id'),
'example_value': 'A1500660325',
},
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def document(self, request, association_id, document_id, **kwargs):
try:
params = signing.loads(document_id, max_age=DOCUMENT_SIGNATURE_MAX_AGE)
@@ -237,20 +241,22 @@ class APIEntreprise(BaseResource):
return HttpResponse(response, content_type='application/pdf')
raise Http404('document not found')
@endpoint(name='document_association',
pattern=r'(?P<association_id>\w+)/get-last/$',
example_pattern='{association_id}/get-last/',
description=_('Get association\'s last document of type'),
parameters={
'association_id': ASSOCIATION_PARAM,
'document_type': {
'description': _('document type'),
'example_value': 'Statuts',
},
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
name='document_association',
pattern=r'(?P<association_id>\w+)/get-last/$',
example_pattern='{association_id}/get-last/',
description=_('Get association\'s last document of type'),
parameters={
'association_id': ASSOCIATION_PARAM,
'document_type': {
'description': _('document type'),
'example_value': 'Statuts',
},
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def get_last_document_of_type(self, request, association_id, document_type, **kwargs):
document = None
resp = self.get('documents_associations/%s/' % association_id, **kwargs)
@@ -260,46 +266,49 @@ class APIEntreprise(BaseResource):
document = documents[-1]
return {'data': document}
@endpoint(perm='can_access',
pattern=r'(?P<siren>\w+)/$',
example_pattern='{siren}/',
description=_('Get firm\'s data from Infogreffe'),
parameters={
'siren': SIREN_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
perm='can_access',
pattern=r'(?P<siren>\w+)/$',
example_pattern='{siren}/',
description=_('Get firm\'s data from Infogreffe'),
parameters={
'siren': SIREN_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def extraits_rcs(self, request, siren, **kwargs):
return self.get('extraits_rcs_infogreffe/%s/' % siren, **kwargs)
@endpoint(perm='can_access',
pattern=r'(?P<association_id>\w+)/$',
example_pattern='{association_id}/',
description=_('Get association\'s related informations'),
parameters={
'association_id': ASSOCIATION_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
perm='can_access',
pattern=r'(?P<association_id>\w+)/$',
example_pattern='{association_id}/',
description=_('Get association\'s related informations'),
parameters={
'association_id': ASSOCIATION_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def associations(self, request, association_id, **kwargs):
return self.get('associations/%s/' % association_id, **kwargs)
@endpoint(perm='can_access',
pattern=r'(?P<siren>\w+)/$',
example_pattern='{siren}/',
description=_('Get firm\'s related informations'),
parameters={
'siren': SIREN_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
'include_private': {
'description': _('Include private informations'),
'example_value': 'true'
}
})
@endpoint(
perm='can_access',
pattern=r'(?P<siren>\w+)/$',
example_pattern='{siren}/',
description=_('Get firm\'s related informations'),
parameters={
'siren': SIREN_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
'include_private': {'description': _('Include private informations'), 'example_value': 'true'},
},
)
def entreprises(self, request, siren, include_private=False, **kwargs):
if len(siren) != 9:
raise APIError(_('invalid SIREN length (must be 9 characters)'))
@@ -307,60 +316,68 @@ class APIEntreprise(BaseResource):
kwargs['non_diffusables'] = True
return self.get('entreprises/%s/' % siren, **kwargs)
@endpoint(perm='can_access',
methods=['get'],
pattern=r'(?P<siret>\w+)/$',
example_pattern='{siret}/',
description_get=_('Get firms\'s related informations'),
parameters={
'siret': SIRET_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
perm='can_access',
methods=['get'],
pattern=r'(?P<siret>\w+)/$',
example_pattern='{siret}/',
description_get=_('Get firms\'s related informations'),
parameters={
'siret': SIRET_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def etablissements(self, request, siret, **kwargs):
return self.get('etablissements/%s/' % siret, **kwargs)
@endpoint(perm='can_access',
methods=['get'],
pattern=r'(?P<siret>\w+)/$',
example_pattern='{siret}/',
description_get=_('Get firms\'s financial year informations'),
parameters={
'siret': SIRET_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
perm='can_access',
methods=['get'],
pattern=r'(?P<siret>\w+)/$',
example_pattern='{siret}/',
description_get=_('Get firms\'s financial year informations'),
parameters={
'siret': SIRET_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def exercices(self, request, siret, **kwargs):
return self.get('exercices/%s/' % siret, **kwargs)
@endpoint(perm='can_access',
pattern=r'(?P<siren>\w+)/$',
example_pattern='{siren}/',
description=_('Get firm\'s annual workforce data'),
parameters={
'siren': SIREN_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
perm='can_access',
pattern=r'(?P<siren>\w+)/$',
example_pattern='{siren}/',
description=_('Get firm\'s annual workforce data'),
parameters={
'siren': SIREN_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def effectifs_annuels_acoss_covid(self, request, siren, **kwargs):
if len(siren) != 9:
raise APIError(_('invalid SIREN length (must be 9 characters)'))
return self.get('effectifs_annuels_acoss_covid/%s/' % siren, **kwargs)
@endpoint(perm='can_access',
pattern=r'(?P<year>\w+)/(?P<month>\w+)/(?P<siren>\w+)/$',
description=_('Get firm\'s monthly workforce data, by SIREN'),
parameters={
'year': YEAR_PARAM,
'month': MONTH_PARAM,
'siren': SIREN_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
perm='can_access',
pattern=r'(?P<year>\w+)/(?P<month>\w+)/(?P<siren>\w+)/$',
description=_('Get firm\'s monthly workforce data, by SIREN'),
parameters={
'year': YEAR_PARAM,
'month': MONTH_PARAM,
'siren': SIREN_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def entreprise_effectifs_mensuels_acoss_covid(self, request, year, month, siren, **kwargs):
if len(siren) != 9:
raise APIError(_('invalid SIREN length (must be 9 characters)'))
@@ -369,17 +386,19 @@ class APIEntreprise(BaseResource):
'effectifs_mensuels_acoss_covid/%s/%s/entreprise/%s/' % (year, month, siren), **kwargs
)
@endpoint(perm='can_access',
pattern=r'(?P<year>\w+)/(?P<month>\w+)/(?P<siret>\w+)/$',
description=_('Get firm\'s monthly workforce data, by SIRET'),
parameters={
'year': YEAR_PARAM,
'month': MONTH_PARAM,
'siret': SIRET_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
})
@endpoint(
perm='can_access',
pattern=r'(?P<year>\w+)/(?P<month>\w+)/(?P<siret>\w+)/$',
description=_('Get firm\'s monthly workforce data, by SIRET'),
parameters={
'year': YEAR_PARAM,
'month': MONTH_PARAM,
'siret': SIRET_PARAM,
'object': OBJECT_PARAM,
'context': CONTEXT_PARAM,
'recipient': RECIPIENT_PARAM,
},
)
def etablissement_effectifs_mensuels_acoss_covid(self, request, year, month, siret, **kwargs):
month = month.zfill(2)
return self.get(


@@ -29,7 +29,7 @@ KNOWN_ERRORS = {
'Il existe au moins un enfant pour lequel il existe un droit sur le dossier et/ou à la période demandée',
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée',
'Il existe des droits pour la prestation sélectionnée sur le dossier et/ou la période demandée (après date du jour)',
'Lopérateurs téléphonique» ne propose pas de raccordement SMS avec un prestataire externe (raccordement avec un numéro court). '
'Lopérateurs téléphonique» ne propose pas de raccordement SMS avec un prestataire externe (raccordement avec un numéro court). ',
},
500: {
'Les informations souhaitées sont momentanément indisponibles. Merci de renouveler votre demande ultérieurement.',
@@ -42,6 +42,6 @@ KNOWN_ERRORS = {
"Votre demande n'a pu aboutir en raison d'un incident technique momentané. Merci de renouveler votre demande ultérieurement.",
"Votre demande n'a pu aboutir en raison d'une erreur fonctionnelle lié à l'appel au service IMC.",
"Votre demande n'a pu aboutir en raison d'une erreur technique lié à l'appel au service IMC.",
"Votre demande na pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. Merci de renouveler votre demande ultérieurement."
}
"Votre demande na pu aboutir en raison d'un problème technique lié aux données entrantes du webservice. Merci de renouveler votre demande ultérieurement.",
},
}


@@ -14,14 +14,50 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='APIParticulier',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])),
('_platform', models.CharField(choices=[(b'prod', 'Production'), (b'test', 'Test')], max_length=8, verbose_name='Platform')),
('_api_key', models.CharField(default=b'', max_length=64, verbose_name='API key', blank=True)),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_apiparticulier_users_+', related_query_name='+', blank=True)),
(
'log_level',
models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
),
(
'_platform',
models.CharField(
choices=[(b'prod', 'Production'), (b'test', 'Test')],
max_length=8,
verbose_name='Platform',
),
),
(
'_api_key',
models.CharField(default=b'', max_length=64, verbose_name='API key', blank=True),
),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_apiparticulier_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'abstract': False,


@@ -42,11 +42,7 @@ from .known_errors import KNOWN_ERRORS
class APIParticulier(BaseResource):
PLATFORMS = [
{
'name': 'prod',
'label': _('Production'),
'url': 'https://particulier.api.gouv.fr/api/'
},
{'name': 'prod', 'label': _('Production'), 'url': 'https://particulier.api.gouv.fr/api/'},
{
'name': 'test',
'label': _('Test'),
@@ -58,13 +54,10 @@ class APIParticulier(BaseResource):
platform = models.CharField(
verbose_name=_('Platform'),
max_length=8,
choices=[(key, platform['label']) for key, platform in PLATFORMS.items()])
choices=[(key, platform['label']) for key, platform in PLATFORMS.items()],
)
api_key = models.CharField(
max_length=256,
default='',
blank=True,
verbose_name=_('API key'))
api_key = models.CharField(max_length=256, default='', blank=True, verbose_name=_('API key'))
log_requests_errors = False
@@ -79,28 +72,24 @@ class APIParticulier(BaseResource):
if user:
headers['X-User'] = user
try:
response = self.requests.get(
url,
headers=headers,
timeout=5,
**kwargs)
response = self.requests.get(url, headers=headers, timeout=5, **kwargs)
except requests.RequestException as e:
raise APIError(
u'API-particulier platform "%s" connection error: %s' %
(self.platform, exception_to_text(e)),
u'API-particulier platform "%s" connection error: %s' % (self.platform, exception_to_text(e)),
log_error=True,
data={
'code': 'connection-error',
'platform': self.platform,
'error': six.text_type(e),
})
},
)
try:
data = response.json()
except JSONDecodeError as e:
content = repr(response.content[:1000])
raise APIError(
u'API-particulier platform "%s" returned non-JSON content with status %s: %s' %
(self.platform, response.status_code, content),
u'API-particulier platform "%s" returned non-JSON content with status %s: %s'
% (self.platform, response.status_code, content),
log_error=True,
data={
'code': 'non-json',
@@ -108,7 +97,8 @@ class APIParticulier(BaseResource):
'exception': six.text_type(e),
'platform': self.platform,
'content': content,
})
},
)
if response.status_code != 200:
# avoid logging http errors about non-transport failure
message = data.get('message', '')
@@ -120,162 +110,180 @@ class APIParticulier(BaseResource):
'status_code': response.status_code,
'platform': self.platform,
'content': data,
})
},
)
raise APIError(
u'API-particulier platform "%s" returned a non 200 status %s: %s' %
(self.platform, response.status_code, data),
u'API-particulier platform "%s" returned a non 200 status %s: %s'
% (self.platform, response.status_code, data),
log_error=True,
data={
'code': 'non-200',
'status_code': response.status_code,
'platform': self.platform,
'content': data,
})
},
)
return {
'err': 0,
'data': data,
}
@endpoint(perm='can_access',
show=False,
description=_('Get citizen\'s fiscal informations'),
parameters={
'numero_fiscal': {
'description': _('fiscal identifier'),
'example_value': '1562456789521',
},
'reference_avis': {
'description': _('tax notice number'),
'example_value': '1512456789521',
},
'user': {
'description': _('requesting user'),
'example_value': 'John Doe (agent)',
},
})
@endpoint(
perm='can_access',
show=False,
description=_('Get citizen\'s fiscal informations'),
parameters={
'numero_fiscal': {
'description': _('fiscal identifier'),
'example_value': '1562456789521',
},
'reference_avis': {
'description': _('tax notice number'),
'example_value': '1512456789521',
},
'user': {
'description': _('requesting user'),
'example_value': 'John Doe (agent)',
},
},
)
def impots_svair(self, request, numero_fiscal, reference_avis, user=None):
# deprecated endpoint
return self.v2_avis_imposition(request, numero_fiscal, reference_avis, user=user)
@endpoint(name='avis-imposition',
perm='can_access',
description=_('Get citizen\'s fiscal informations'),
parameters={
'numero_fiscal': {
'description': _('fiscal identifier'),
'example_value': '1562456789521',
},
'reference_avis': {
'description': _('tax notice number'),
'example_value': '1512456789521',
},
'user': {
'description': _('requesting user'),
'example_value': 'John Doe (agent)',
},
},
json_schema_response={
'type': 'object',
'required': ['err'],
'properties': {
'err': {'enum': [0, 1]},
'declarant1': {
'type': 'object',
'properties': {
'nom': {'type': 'string'},
'nomNaissance': {'type': 'string'},
'prenoms': {'type': 'string'},
'dateNaissance': {'type': 'string'}
},
},
'declarant2': {
'type': 'object',
'properties': {
'nom': {'type': 'string'},
'nomNaissance': {'type': 'string'},
'prenoms': {'type': 'string'},
'dateNaissance': {'type': 'string'}
}
},
'foyerFiscal': {
'type': 'object',
'properties': {
'annee': {'type': 'integer'},
'adresse': {'type': 'string'},
}
},
'dateRecouvrement': {'type': 'string', 'pattern': r'^\d{1,2}/\d{1,2}/\d{4}$'},
'dateEtablissement': {'type': 'string', 'pattern': r'^\d{1,2}/\d{1,2}/\d{4}$'},
'nombreParts': {'type': 'integer'},
'situationFamille': {'type': 'string'},
'nombrePersonnesCharge': {'type': 'integer'},
'revenuBrutGlobal': {'type': 'integer'},
'revenuImposable': {'type': 'integer'},
'impotRevenuNetAvantCorrections': {'type': 'integer'},
'montantImpot': {'type': 'integer'},
'revenuFiscalReference': {'type': 'integer'},
'anneeImpots': {'type': 'string', 'pattern': r'^[0-9]{4}$'},
'anneeRevenus': {'type': 'string', 'pattern': r'^[0-9]{4}$'},
'erreurCorrectif': {'type': 'string'},
'situationPartielle': {'type': 'string'}
}
})
@endpoint(
name='avis-imposition',
perm='can_access',
description=_('Get citizen\'s fiscal informations'),
parameters={
'numero_fiscal': {
'description': _('fiscal identifier'),
'example_value': '1562456789521',
},
'reference_avis': {
'description': _('tax notice number'),
'example_value': '1512456789521',
},
'user': {
'description': _('requesting user'),
'example_value': 'John Doe (agent)',
},
},
json_schema_response={
'type': 'object',
'required': ['err'],
'properties': {
'err': {'enum': [0, 1]},
'declarant1': {
'type': 'object',
'properties': {
'nom': {'type': 'string'},
'nomNaissance': {'type': 'string'},
'prenoms': {'type': 'string'},
'dateNaissance': {'type': 'string'},
},
},
'declarant2': {
'type': 'object',
'properties': {
'nom': {'type': 'string'},
'nomNaissance': {'type': 'string'},
'prenoms': {'type': 'string'},
'dateNaissance': {'type': 'string'},
},
},
'foyerFiscal': {
'type': 'object',
'properties': {
'annee': {'type': 'integer'},
'adresse': {'type': 'string'},
},
},
'dateRecouvrement': {'type': 'string', 'pattern': r'^\d{1,2}/\d{1,2}/\d{4}$'},
'dateEtablissement': {'type': 'string', 'pattern': r'^\d{1,2}/\d{1,2}/\d{4}$'},
'nombreParts': {'type': 'integer'},
'situationFamille': {'type': 'string'},
'nombrePersonnesCharge': {'type': 'integer'},
'revenuBrutGlobal': {'type': 'integer'},
'revenuImposable': {'type': 'integer'},
'impotRevenuNetAvantCorrections': {'type': 'integer'},
'montantImpot': {'type': 'integer'},
'revenuFiscalReference': {'type': 'integer'},
'anneeImpots': {'type': 'string', 'pattern': r'^[0-9]{4}$'},
'anneeRevenus': {'type': 'string', 'pattern': r'^[0-9]{4}$'},
'erreurCorrectif': {'type': 'string'},
'situationPartielle': {'type': 'string'},
},
},
)
def v2_avis_imposition(self, request, numero_fiscal, reference_avis, user=None):
numero_fiscal = numero_fiscal.strip()[:13]
reference_avis = reference_avis.strip()[:13]
if len(numero_fiscal) < 13 or len(reference_avis) < 13:
raise APIError('bad numero_fiscal or reference_avis, must be 13 chars long', status_code=400)
return self.get('v2/avis-imposition', params={
'numeroFiscal': numero_fiscal,
'referenceAvis': reference_avis,
}, user=user)
return self.get(
'v2/avis-imposition',
params={
'numeroFiscal': numero_fiscal,
'referenceAvis': reference_avis,
},
user=user,
)
@endpoint(perm='can_access',
show=False,
description=_('Get family allowances recipient informations'),
parameters={
'code_postal': {
'description': _('postal code'),
'example_value': '99148',
},
'numero_allocataire': {
'description': _('recipient identifier'),
'example_value': '0000354',
},
'user': {
'description': _('requesting user'),
'example_value': 'John Doe (agent)',
},
})
@endpoint(
perm='can_access',
show=False,
description=_('Get family allowances recipient informations'),
parameters={
'code_postal': {
'description': _('postal code'),
'example_value': '99148',
},
'numero_allocataire': {
'description': _('recipient identifier'),
'example_value': '0000354',
},
'user': {
'description': _('requesting user'),
'example_value': 'John Doe (agent)',
},
},
)
def caf_famille(self, request, code_postal, numero_allocataire, user=None):
# deprecated endpoint
return self.v2_situation_familiale(request, code_postal, numero_allocataire, user=user)
@endpoint(name='situation-familiale',
perm='can_access',
description=_('Get family allowances recipient informations'),
parameters={
'code_postal': {
'description': _('postal code'),
'example_value': '99148',
},
'numero_allocataire': {
'description': _('recipient identifier'),
'example_value': '0000354',
},
'user': {
'description': _('requesting user'),
'example_value': 'John Doe (agent)',
},
})
@endpoint(
name='situation-familiale',
perm='can_access',
description=_('Get family allowances recipient informations'),
parameters={
'code_postal': {
'description': _('postal code'),
'example_value': '99148',
},
'numero_allocataire': {
'description': _('recipient identifier'),
'example_value': '0000354',
},
'user': {
'description': _('requesting user'),
'example_value': 'John Doe (agent)',
},
},
)
def v2_situation_familiale(self, request, code_postal, numero_allocataire, user=None):
if not code_postal.strip() or not numero_allocataire.strip():
raise APIError('missing code_postal or numero_allocataire', status_code=400)
return self.get('v2/composition-familiale', params={
'codePostal': code_postal,
'numeroAllocataire': numero_allocataire,
}, user=user)
return self.get(
'v2/composition-familiale',
params={
'codePostal': code_postal,
'numeroAllocataire': numero_allocataire,
},
user=user,
)
category = _('Business Process Connectors')


@@ -14,13 +14,36 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Arcgis',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('log_level', models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])),
(
'log_level',
models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
),
('base_url', models.CharField(max_length=256, verbose_name='SIG Url')),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_arcgis_users_+', related_query_name='+', blank=True)),
(
'users',
models.ManyToManyField(
to='base.ApiUser', related_name='_arcgis_users_+', related_query_name='+', blank=True
),
),
],
options={
'verbose_name': 'Arcgis Webservice',

View File

@@ -29,7 +29,9 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='arcgis',
name='client_certificate',
field=models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate'),
field=models.FileField(
blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate'
),
),
migrations.AddField(
model_name='arcgis',
@@ -54,6 +56,18 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='arcgis',
name='log_level',
field=models.CharField(choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')], default=b'INFO', max_length=10, verbose_name='Log Level'),
field=models.CharField(
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
default=b'INFO',
max_length=10,
verbose_name='Log Level',
),
),
]


@@ -18,17 +18,51 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Query',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('name', models.CharField(max_length=128, verbose_name='Name')),
('slug', models.SlugField(max_length=128, verbose_name='Slug')),
('description', models.TextField(blank=True, verbose_name='Description')),
('folder', models.CharField(blank=True, max_length=64, verbose_name='ArcGis Folder')),
('service', models.CharField(max_length=64, verbose_name='ArcGis Service')),
('layer', models.CharField(blank=True, max_length=8, verbose_name='ArcGis Layer')),
('where', models.TextField(blank=True, help_text="<span>Use syntax <tt>{name}</tt> to introduce a string parameter and <tt>{name:d}</tt> for a decimal parameter. ex.:<br/><tt>adress LIKE ('%' || UPPER({adress}) || '%')</tt><br/><tt>population < {population:d}</tt></span>", validators=[passerelle.apps.arcgis.models.validate_where], verbose_name='ArcGis Where Clause')),
('id_template', models.TextField(blank=True, help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}", validators=[passerelle.utils.templates.validate_template], verbose_name='Id template')),
('text_template', models.TextField(blank=True, help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}", validators=[passerelle.utils.templates.validate_template], verbose_name='Text template')),
('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='arcgis.ArcGIS', verbose_name='Resource')),
(
'where',
models.TextField(
blank=True,
help_text="<span>Use syntax <tt>{name}</tt> to introduce a string parameter and <tt>{name:d}</tt> for a decimal parameter. ex.:<br/><tt>adress LIKE ('%' || UPPER({adress}) || '%')</tt><br/><tt>population < {population:d}</tt></span>",
validators=[passerelle.apps.arcgis.models.validate_where],
verbose_name='ArcGis Where Clause',
),
),
(
'id_template',
models.TextField(
blank=True,
help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}",
validators=[passerelle.utils.templates.validate_template],
verbose_name='Id template',
),
),
(
'text_template',
models.TextField(
blank=True,
help_text="Use Django's template syntax. Attributes can be accessed through {{ attributes.name }}",
validators=[passerelle.utils.templates.validate_template],
verbose_name='Text template',
),
),
(
'resource',
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to='arcgis.ArcGIS',
verbose_name='Resource',
),
),
],
options={
'ordering': ['name'],


@@ -16,7 +16,9 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='arcgis',
name='client_certificate',
field=models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS client certificate'),
field=models.FileField(
blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
),
),
migrations.AlterField(
model_name='arcgis',
@@ -26,6 +28,11 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='query',
name='resource',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='queries', to='arcgis.ArcGIS', verbose_name='Resource'),
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name='queries',
to='arcgis.ArcGIS',
verbose_name='Resource',
),
),
]


@@ -46,44 +46,61 @@ class ArcGIS(BaseResource, HTTPResource):
class Meta:
verbose_name = _('ArcGIS REST API')
@endpoint(name='mapservice-query',
description=_('Map Service Query'),
perm='can_access',
parameters={
'folder': {
'description': _('Folder name'),
'example_value': 'Specialty',
},
'service': {
'description': _('Service name'),
'example_value': 'ESRI_StateCityHighway_USA',
},
'layer': {
'description': _('Layer or table name'),
'example_value': '1',
},
'lat': {'description': _('Latitude')},
'lon': {'description': _('Longitude')},
'latmin': {'description': _('Minimal latitude (envelope)')},
'lonmin': {'description': _('Minimal longitude (envelope)')},
'latmax': {'description': _('Maximal latitude (envelope)')},
'lonmax': {'description': _('Maximal longitude (envelope)')},
'q': {'description': _('Search text in display field')},
'template': {
'description': _('Django template for text attribute'),
'example_value': '{{ attributes.STATE_NAME }} ({{ attributes.STATE_ABBR }})',
},
'id_template': {
'description': _('Django template for id attribute'),
},
'full': {
'description': _('Returns all ArcGIS informations (geometry, metadata)'),
'type': 'bool',
},
})
def mapservice_query(self, request, service, layer='0', folder='', lat=None, lon=None,
latmin=None, lonmin=None, latmax=None, lonmax=None, q=None,
template=None, id_template=None, full=False, **kwargs):
@endpoint(
name='mapservice-query',
description=_('Map Service Query'),
perm='can_access',
parameters={
'folder': {
'description': _('Folder name'),
'example_value': 'Specialty',
},
'service': {
'description': _('Service name'),
'example_value': 'ESRI_StateCityHighway_USA',
},
'layer': {
'description': _('Layer or table name'),
'example_value': '1',
},
'lat': {'description': _('Latitude')},
'lon': {'description': _('Longitude')},
'latmin': {'description': _('Minimal latitude (envelope)')},
'lonmin': {'description': _('Minimal longitude (envelope)')},
'latmax': {'description': _('Maximal latitude (envelope)')},
'lonmax': {'description': _('Maximal longitude (envelope)')},
'q': {'description': _('Search text in display field')},
'template': {
'description': _('Django template for text attribute'),
'example_value': '{{ attributes.STATE_NAME }} ({{ attributes.STATE_ABBR }})',
},
'id_template': {
'description': _('Django template for id attribute'),
},
'full': {
'description': _('Returns all ArcGIS informations (geometry, metadata)'),
'type': 'bool',
},
},
)
def mapservice_query(
self,
request,
service,
layer='0',
folder='',
lat=None,
lon=None,
latmin=None,
lonmin=None,
latmax=None,
lonmax=None,
q=None,
template=None,
id_template=None,
full=False,
**kwargs,
):
url = urlparse.urljoin(self.base_url, 'services/')
if folder:
url = urlparse.urljoin(url, folder + '/')
@@ -109,8 +126,7 @@ class ArcGIS(BaseResource, HTTPResource):
lonmin, latmin = float(lonmin), float(latmin)
lonmax, latmax = float(lonmax), float(latmax)
except (ValueError,):
raise APIError('<lonmin> <latmin> <lonmax> and <latmax> must be floats',
http_status=400)
raise APIError('<lonmin> <latmin> <lonmax> and <latmax> must be floats', http_status=400)
params['geometry'] = '{},{},{},{}'.format(lonmin, latmin, lonmax, latmax)
params['geometryType'] = 'esriGeometryEnvelope'
if q is not None:
@@ -156,7 +172,7 @@ class ArcGIS(BaseResource, HTTPResource):
feature['id'] = '%s' % get_feature_attribute(feature, id_fieldname)
feature['text'] = '%s' % get_feature_attribute(feature, text_fieldname)
else:
feature['id'] = feature['text'] = '%d' % (n+1)
feature['id'] = feature['text'] = '%d' % (n + 1)
if template:
feature['text'] = render_to_string(template, feature)
if id_template:
@@ -169,22 +185,30 @@ class ArcGIS(BaseResource, HTTPResource):
return {'data': data, 'metadata': infos}
return {'data': data}
@endpoint(name='district',
description=_('Districts in Nancy Town'),
parameters={
'lat': {'description': _('Latitude')},
'lon': {'description': _('Longitude')},
},
show=False)
@endpoint(
name='district',
description=_('Districts in Nancy Town'),
parameters={
'lat': {'description': _('Latitude')},
'lon': {'description': _('Longitude')},
},
show=False,
)
def district(self, request, lon=None, lat=None):
# deprecated endpoint
if 'NANCY_Grc' in self.base_url:
# Nancy URL used to contains folder, service and layer, remove them
self.base_url = 'https://geoservices.grand-nancy.org/arcgis/rest/'
features = self.mapservice_query(request, folder='public', service='NANCY_Grc', layer='0',
template='{{ attributes.NOM }}',
id_template='{{ attributes.NUMERO }}',
lon=lon, lat=lat)['data']
features = self.mapservice_query(
request,
folder='public',
service='NANCY_Grc',
layer='0',
template='{{ attributes.NOM }}',
id_template='{{ attributes.NUMERO }}',
lon=lon,
lat=lat,
)['data']
if not features:
raise APIError('No features found.')
for feature in features:
@@ -197,15 +221,14 @@ class ArcGIS(BaseResource, HTTPResource):
@endpoint(
name='tile',
description=_('Tiles layer'),
pattern=r'^(?P<layer>[\w/]+)/(?P<zoom>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+)\.png$')
pattern=r'^(?P<layer>[\w/]+)/(?P<zoom>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+)\.png$',
)
def tile(self, request, layer, zoom, tile_x, tile_y):
zoom = int(zoom)
tile_x = int(tile_x)
tile_y = int(tile_y)
bbox = '%.6f,%.6f,%.6f,%.6f' % (
num2deg(tile_x, tile_y, zoom) +
num2deg(tile_x+1, tile_y+1, zoom))
bbox = '%.6f,%.6f,%.6f,%.6f' % (num2deg(tile_x, tile_y, zoom) + num2deg(tile_x + 1, tile_y + 1, zoom))
# imageSR=3857: default projection for leaflet
base_url = self.base_url
@ -213,19 +236,22 @@ class ArcGIS(BaseResource, HTTPResource):
base_url += '/'
return HttpResponse(
self.requests.get(
base_url +
'%s/MapServer/export' % layer +
'?dpi=96&format=png24&bboxSR=4326&imageSR=3857&' +
'transparent=true&size=256,256&f=image&' +
'bbox=%s' % bbox
base_url
+ '%s/MapServer/export' % layer
+ '?dpi=96&format=png24&bboxSR=4326&imageSR=3857&'
+ 'transparent=true&size=256,256&f=image&'
+ 'bbox=%s' % bbox
).content,
content_type='image/png')
content_type='image/png',
)
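The tile endpoint above converts slippy-map tile indices to a WGS84 bounding box through num2deg, whose definition is not part of this diff. As a hedged reference, it presumably follows the standard OpenStreetMap tile-to-coordinate formula sketched below; the function name matches the code above, but the exact return order of passerelle's helper is an assumption.

import math

def num2deg(xtile, ytile, zoom):
    # number of tiles along one axis at this zoom level
    n = 2.0 ** zoom
    lon_deg = xtile / n * 360.0 - 180.0
    # inverse Mercator projection for the latitude
    lat_rad = math.atan(math.sinh(math.pi * (1 - 2 * ytile / n)))
    return math.degrees(lat_rad), lon_deg

# the endpoint concatenates the corner of tile (x, y) with the corner of
# tile (x + 1, y + 1) to build the four comma-separated bbox values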
@endpoint(name='q',
description=_('Query'),
pattern=r'^(?P<query_slug>[\w:_-]+)/$',
perm='can_access',
show=False)
@endpoint(
name='q',
description=_('Query'),
pattern=r'^(?P<query_slug>[\w:_-]+)/$',
perm='can_access',
show=False,
)
def q(self, request, query_slug, q=None, full=False, **kwargs):
query = get_object_or_404(Query, resource=self, slug=query_slug)
refs = [ref for ref, _ in query.where_references]
@ -282,22 +308,12 @@ def validate_where(format_string):
class Query(BaseQuery):
resource = models.ForeignKey(
to=ArcGIS,
related_name='queries',
verbose_name=_('Resource'),
on_delete=models.CASCADE)
to=ArcGIS, related_name='queries', verbose_name=_('Resource'), on_delete=models.CASCADE
)
folder = models.CharField(
verbose_name=_('ArcGis Folder'),
max_length=64,
blank=True)
service = models.CharField(
verbose_name=_('ArcGis Service'),
max_length=64)
layer = models.CharField(
verbose_name=_('ArcGis Layer'),
max_length=8,
blank=True)
folder = models.CharField(verbose_name=_('ArcGis Folder'), max_length=64, blank=True)
service = models.CharField(verbose_name=_('ArcGis Service'), max_length=64)
layer = models.CharField(verbose_name=_('ArcGis Layer'), max_length=8, blank=True)
where = models.TextField(
verbose_name=_('ArcGis Where Clause'),
@ -308,19 +324,28 @@ class Query(BaseQuery):
'<span>Use syntax <tt>{name}</tt> to introduce a string '
'parameter and <tt>{name:d}</tt> for a decimal parameter. ex.:<br/>'
'<tt>adress LIKE (\'%\' || UPPER({adress}) || \'%\')</tt><br/>'
'<tt>population < {population:d}</tt></span>')))
'<tt>population < {population:d}</tt></span>'
)
),
)
id_template = models.TextField(
verbose_name=_('Id template'),
validators=[validate_template],
help_text=_('Use Django\'s template syntax. Attributes can be accessed through {{ attributes.name }}'),
blank=True)
help_text=_(
'Use Django\'s template syntax. Attributes can be accessed through {{ attributes.name }}'
),
blank=True,
)
text_template = models.TextField(
verbose_name=_('Text template'),
help_text=_('Use Django\'s template syntax. Attributes can be accessed through {{ attributes.name }}'),
help_text=_(
'Use Django\'s template syntax. Attributes can be accessed through {{ attributes.name }}'
),
validators=[validate_template],
blank=True)
blank=True,
)
delete_view = 'arcgis-query-delete'
edit_view = 'arcgis-query-edit'
@ -328,15 +353,20 @@ class Query(BaseQuery):
@property
def where_references(self):
if self.where:
return [(ref, int if spec and spec[-1] == 'd' else str)
for _, ref, spec, _ in SqlFormatter().parse(self.where) if ref is not None]
return [
(ref, int if spec and spec[-1] == 'd' else str)
for _, ref, spec, _ in SqlFormatter().parse(self.where)
if ref is not None
]
else:
return []
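The help text above documents the {name} / {name:d} parameter syntax for the where clause, and where_references turns it into (name, type) pairs. A minimal sketch of that extraction, assuming SqlFormatter parses format fields like Python's built-in string.Formatter (the real class presumably also handles SQL quoting when substituting values):

import string

where = "adress LIKE ('%' || UPPER({adress}) || '%') AND population < {population:d}"

refs = [
    (field, int if spec and spec[-1] == 'd' else str)
    for _, field, spec, _ in string.Formatter().parse(where)
    if field is not None
]
# refs == [('adress', str), ('population', int)]: plain fields stay strings,
# ':d' fields are converted to int before substitution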
def q(self, request, q=None, full=False, **kwargs):
kwargs.update({
'service': self.service,
})
kwargs.update(
{
'service': self.service,
}
)
if self.id_template:
kwargs['id_template'] = self.id_template
if self.text_template:

View File

@ -19,10 +19,11 @@ from django.conf.urls import url
from . import views
management_urlpatterns = [
url(r'^(?P<slug>[\w,-]+)/query/new/$',
views.QueryNew.as_view(), name='arcgis-query-new'),
url(r'^(?P<slug>[\w,-]+)/query/(?P<pk>\d+)/$',
views.QueryEdit.as_view(), name='arcgis-query-edit'),
url(r'^(?P<slug>[\w,-]+)/query/(?P<pk>\d+)/delete/$',
views.QueryDelete.as_view(), name='arcgis-query-delete'),
url(r'^(?P<slug>[\w,-]+)/query/new/$', views.QueryNew.as_view(), name='arcgis-query-new'),
url(r'^(?P<slug>[\w,-]+)/query/(?P<pk>\d+)/$', views.QueryEdit.as_view(), name='arcgis-query-edit'),
url(
r'^(?P<slug>[\w,-]+)/query/(?P<pk>\d+)/delete/$',
views.QueryDelete.as_view(),
name='arcgis-query-delete',
),
]

View File

@ -14,15 +14,41 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ArpegeECP',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('description', models.TextField(verbose_name='Description')),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])),
(
'log_level',
models.CharField(
default=b'INFO',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
),
('webservice_base_url', models.URLField(verbose_name='Webservice Base URL')),
('hawk_auth_id', models.CharField(max_length=64, verbose_name='Hawk Authentication id')),
('hawk_auth_key', models.CharField(max_length=64, verbose_name='Hawk Authentication secret')),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_arpegeecp_users_+', related_query_name='+', blank=True)),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_arpegeecp_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'verbose_name': 'Arpege ECP',

View File

@ -56,8 +56,9 @@ class ArpegeECP(BaseResource):
def get_access_token(self, NameID):
url = urlparse.urljoin(self.webservice_base_url, 'LoginParSubOIDC')
try:
response = self.requests.post(url, auth=HawkAuth(self.hawk_auth_id, self.hawk_auth_key),
json={'subOIDC': NameID})
response = self.requests.post(
url, auth=HawkAuth(self.hawk_auth_id, self.hawk_auth_key), json={'subOIDC': NameID}
)
response.raise_for_status()
except RequestException as e:
raise APIError(u'Arpege server is down: %s' % e)
@ -73,7 +74,12 @@ class ArpegeECP(BaseResource):
return result['Data']['AccessToken']
raise APIError(u'%s (%s)' % (result.get('LibErreur'), result.get('CodErreur')))
@endpoint(name='api', pattern='^users/(?P<nameid>\w+)/forms$', perm='can_access', description='Returns user forms')
@endpoint(
name='api',
pattern='^users/(?P<nameid>\w+)/forms$',
perm='can_access',
description='Returns user forms',
)
def get_user_forms(self, request, nameid):
access_token = self.get_access_token(nameid)
url = urlparse.urljoin(self.webservice_base_url, 'DemandesUsager')
@ -98,14 +104,15 @@ class ArpegeECP(BaseResource):
receipt_date = parse_date(data_administratives['date_depot'])
except (KeyError, TypeError) as e:
raise APIError(u'Arpege error: %s %r' % (e, json.dumps(demand)[:1000]))
d = {'url': demand['url'],
'title': data_administratives.get('LibelleQualificationTypeDemande'),
'name': data_administratives.get('LibelleQualificationTypeDemande'),
'status': data_administratives.get('libelle_etat'),
'form_receipt_time': receipt_time,
'readable': True,
'form_receipt_datetime': timezone.datetime.combine(receipt_date, receipt_time),
'form_status_is_endpoint': data_administratives.get('date_fin_instruction') is not None,
d = {
'url': demand['url'],
'title': data_administratives.get('LibelleQualificationTypeDemande'),
'name': data_administratives.get('LibelleQualificationTypeDemande'),
'status': data_administratives.get('libelle_etat'),
'form_receipt_time': receipt_time,
'readable': True,
'form_receipt_datetime': timezone.datetime.combine(receipt_date, receipt_time),
'form_status_is_endpoint': data_administratives.get('date_fin_instruction') is not None,
}
data.append(d)
return {'data': data}

View File

@ -18,7 +18,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='AstreGS',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('description', models.TextField(verbose_name='Description')),
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
@ -28,7 +31,12 @@ class Migration(migrations.Migration):
('organism', models.CharField(max_length=32, verbose_name='Organisme')),
('budget', models.CharField(max_length=32, verbose_name='Budget')),
('exercice', models.CharField(max_length=32, verbose_name='Exercice')),
('users', models.ManyToManyField(blank=True, related_name='_astregs_users_+', related_query_name='+', to='base.ApiUser')),
(
'users',
models.ManyToManyField(
blank=True, related_name='_astregs_users_+', related_query_name='+', to='base.ApiUser'
),
),
],
options={
'verbose_name': 'AstresGS',
@ -37,11 +45,17 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Link',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('name_id', models.CharField(max_length=32)),
('association_id', models.CharField(max_length=32)),
('created', models.DateTimeField(auto_now_add=True)),
('resource', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='astregs.AstreGS')),
(
'resource',
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='astregs.AstreGS'),
),
],
),
migrations.AlterUniqueTogether(

View File

@ -46,18 +46,35 @@ ASSOCIATION_SCHEMA = {
],
"properties": {
"Financier": {"description": "financial association", "type": "string", "enum": ["true", "false"]},
"CodeFamille": {"description": "association family code", "type": "string",},
"CatTiers": {"description": "association category", "type": "string",},
"NomEnregistrement": {"description": "association name", "type": "string",},
"CodeFamille": {
"description": "association family code",
"type": "string",
},
"CatTiers": {
"description": "association category",
"type": "string",
},
"NomEnregistrement": {
"description": "association name",
"type": "string",
},
"StatutTiers": {
"description": "association status",
"type": "string",
"enum": ["PROPOSE", "VALIDE", "REFUSE", "BLOQUE", "A COMPLETER"],
},
"Type": {"description": "association type", "type": "string", "enum": ["D", "F", "*"]},
"NumeroSiret": {"description": "SIREN number", "type": "string",},
"NumeroSiretFin": {"description": "NIC number", "type": "string",},
"AdresseTitre": {"type": "string",},
"NumeroSiret": {
"description": "SIREN number",
"type": "string",
},
"NumeroSiretFin": {
"description": "NIC number",
"type": "string",
},
"AdresseTitre": {
"type": "string",
},
"AdresseIsAdresseDeCommande": {"type": "string", "enum": ["true", "false"]},
"AdresseIsAdresseDeFacturation": {"type": "string", "enum": ["true", "false"]},
},
@ -78,13 +95,27 @@ CONTACT_SCHEMA = {
"EncodeKeyStatut",
],
"properties": {
"CodeContact": {"type": "string",},
"CodeTitreCivilite": {"type": "string",},
"Nom": {"type": "string",},
"AdresseDestinataire": {"type": "string",},
"CodePostal": {"type": "string",},
"Ville": {"type": "string",},
"EncodeKeyStatut": {"type": "string",},
"CodeContact": {
"type": "string",
},
"CodeTitreCivilite": {
"type": "string",
},
"Nom": {
"type": "string",
},
"AdresseDestinataire": {
"type": "string",
},
"CodePostal": {
"type": "string",
},
"Ville": {
"type": "string",
},
"EncodeKeyStatut": {
"type": "string",
},
},
}
@ -105,21 +136,43 @@ DOCUMENT_SCHEMA = {
"document",
],
"properties": {
"Sujet": {"type": "string",},
"Entite": {"type": "string",},
"CodType": {"type": "string",},
"Type": {"type": "string",},
"hdnCodeTrt": {"type": "string",},
"EncodeKeyEntite": {"type": "string",},
"CodeDomaine": {"type": "string",},
"CodDom": {"type": "string",},
"Sujet": {
"type": "string",
},
"Entite": {
"type": "string",
},
"CodType": {
"type": "string",
},
"Type": {
"type": "string",
},
"hdnCodeTrt": {
"type": "string",
},
"EncodeKeyEntite": {
"type": "string",
},
"CodeDomaine": {
"type": "string",
},
"CodDom": {
"type": "string",
},
"document": {
"type": "object",
"required": ['filename', 'content_type', 'content'],
'properties': {
'filename': {'type': 'string',},
'content_type': {'type': 'string',},
'content': {'type': 'string',},
'filename': {
'type': 'string',
},
'content_type': {
'type': 'string',
},
'content': {
'type': 'string',
},
},
},
},
@ -141,14 +194,28 @@ GRANT_SCHEMA = {
"CodeServiceUtilisateur",
],
"properties": {
"Libelle": {"type": "string",},
"LibelleCourt": {"type": "string",},
"Libelle": {
"type": "string",
},
"LibelleCourt": {
"type": "string",
},
"ModGestion": {"type": "string", "enum": ["1", "2", "3", "4"]},
"TypeAide": {"type": "string",},
"Sens": {"type": "string",},
"CodeTiersDem": {"type": "string",},
"CodeServiceGestionnaire": {"type": "string",},
"CodeServiceUtilisateur": {"type": "string",},
"TypeAide": {
"type": "string",
},
"Sens": {
"type": "string",
},
"CodeTiersDem": {
"type": "string",
},
"CodeServiceGestionnaire": {
"type": "string",
},
"CodeServiceUtilisateur": {
"type": "string",
},
},
}
@ -159,11 +226,21 @@ INDANA_SCHEMA = {
"type": "object",
"required": ["CodeDossier", "CodeInd_1", "AnneeInd_1", "ValInd_1"],
"properties": {
"CodeDossier": {"type": "string",},
"CodeInd_1": {"type": "string",},
"AnneeInd_1": {"type": "string",},
"ValInd_1": {"type": "string",},
"IndAide": {"type": "string",},
"CodeDossier": {
"type": "string",
},
"CodeInd_1": {
"type": "string",
},
"AnneeInd_1": {
"type": "string",
},
"ValInd_1": {
"type": "string",
},
"IndAide": {
"type": "string",
},
},
}
@ -174,9 +251,15 @@ INDANA_KEY_SCHEMA = {
"type": "object",
"required": ["CodeDossier", "CodeInd_1", "AnneeInd_1"],
"properties": {
"CodeDossier": {"type": "string",},
"CodeInd_1": {"type": "string",},
"AnneeInd_1": {"type": "string",},
"CodeDossier": {
"type": "string",
},
"CodeInd_1": {
"type": "string",
},
"AnneeInd_1": {
"type": "string",
},
},
}
@ -197,51 +280,26 @@ TIERS_RIB_SCHEMA = {
"CodeStatut",
"CodeDevise",
"CodeIso2Pays",
"LibelleCompteEtranger"
"LibelleCompteEtranger",
],
"properties": {
"CodeDevise": {
"type": "string"
},
"CodeDomiciliation": {
"type": "string"
},
"CodeIso2Pays": {
"type": "string"
},
"CodePaiement": {
"type": "string"
},
"CodeDevise": {"type": "string"},
"CodeDomiciliation": {"type": "string"},
"CodeIso2Pays": {"type": "string"},
"CodePaiement": {"type": "string"},
"CodeStatut": {
"type": "string",
"enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER",
"BLOQUE", "EN MODIFICATION"]
"enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"],
},
"CodeTiers": {
"type": "string"
},
"IndicateurRibDefaut": {
"type": "string"
},
"LibelleCompteEtranger": {
"type": "string"
},
"LibelleCourt": {
"type": "string"
},
"NumeroIban": {
"type": "string"
},
"CleIban": {
"type": "string"
},
"CodeBic": {
"type": "string"
},
"IdRib": {
"type": "string"
}
}
"CodeTiers": {"type": "string"},
"IndicateurRibDefaut": {"type": "string"},
"LibelleCompteEtranger": {"type": "string"},
"LibelleCourt": {"type": "string"},
"NumeroIban": {"type": "string"},
"CleIban": {"type": "string"},
"CodeBic": {"type": "string"},
"IdRib": {"type": "string"},
},
}
TIERS_RIB_UPDATE_SCHEMA = {
@ -259,45 +317,24 @@ TIERS_RIB_UPDATE_SCHEMA = {
"CodeStatut",
"CodeDevise",
"CodeIso2Pays",
"LibelleCompteEtranger"
"LibelleCompteEtranger",
],
"properties": {
"CodeDevise": {
"type": "string"
},
"CodeDomiciliation": {
"type": "string"
},
"CodeIso2Pays": {
"type": "string"
},
"CodePaiement": {
"type": "string"
},
"CodeDevise": {"type": "string"},
"CodeDomiciliation": {"type": "string"},
"CodeIso2Pays": {"type": "string"},
"CodePaiement": {"type": "string"},
"CodeStatut": {
"type": "string",
"enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER",
"BLOQUE", "EN MODIFICATION"]
"enum": ["PROPOSE", "VALIDE", "REFUSE", "A COMPLETER", "BLOQUE", "EN MODIFICATION"],
},
"IndicateurRibDefaut": {
"type": "string"
},
"LibelleCompteEtranger": {
"type": "string"
},
"LibelleCourt": {
"type": "string"
},
"NumeroIban": {
"type": "string"
},
"CleIban": {
"type": "string"
},
"CodeBic": {
"type": "string"
}
}
"IndicateurRibDefaut": {"type": "string"},
"LibelleCompteEtranger": {"type": "string"},
"LibelleCourt": {"type": "string"},
"NumeroIban": {"type": "string"},
"CleIban": {"type": "string"},
"CodeBic": {"type": "string"},
},
}
@ -434,8 +471,14 @@ class AstreGS(BaseResource):
description=_('Create link between user and association'),
perm='can_access',
parameters={
'NameID': {'description': _('Publik NameID'), 'example_value': 'xyz24d934',},
'association_id': {'description': _('Association ID'), 'example_value': '12345',},
'NameID': {
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
'association_id': {
'description': _('Association ID'),
'example_value': '12345',
},
},
)
def link(self, request, NameID, association_id):
@ -464,7 +507,12 @@ class AstreGS(BaseResource):
@endpoint(
description=_('List user links'),
perm='can_access',
parameters={'NameID': {'description': _('Publik NameID'), 'example_value': 'xyz24d934',}},
parameters={
'NameID': {
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
}
},
)
def links(self, request, NameID):
if not Link.objects.filter(resource=self, name_id=NameID).exists():
@ -508,7 +556,12 @@ class AstreGS(BaseResource):
name='get-contact',
perm='can_access',
description=_('Get contact details'),
parameters={'contact_id': {'description': _('Contact identifier'), 'example_value': '1111',}},
parameters={
'contact_id': {
'description': _('Contact identifier'),
'example_value': '1111',
}
},
)
def get_contact(self, request, contact_id):
r = self.call('Contact', 'Chargement', ContactCle={'idContact': contact_id})
@ -533,7 +586,9 @@ class AstreGS(BaseResource):
description=_('Delete contact'),
name='delete-contact',
perm='can_access',
parameters={'contact_id': {'description': _('Contact ID'), 'example_value': '4242'},},
parameters={
'contact_id': {'description': _('Contact ID'), 'example_value': '4242'},
},
)
def delete_contact(self, request, contact_id):
r = self.call('Contact', 'Suppression', ContactCle={'idContact': contact_id})
@ -615,31 +670,31 @@ class AstreGS(BaseResource):
r = self.call('TiersRib', 'Creation', TiersRib=post_data)
return {'data': serialize_object(r)}
@endpoint(
name='get-tiers-rib', perm='can_access',
name='get-tiers-rib',
perm='can_access',
description=_('Get RIB'),
parameters={
'CodeTiers': {'example_value': '42435'},
'IdRib': {'example_value': '4242'},
}
},
)
def get_tiers_rib(self, request, CodeTiers, IdRib):
payload = {'CodeTiers': CodeTiers, 'IdRib': IdRib}
r = self.call('TiersRib', 'Chargement', TiersRibCle=payload)
return {'data': serialize_object(r)}
@endpoint(
name='update-tiers-rib', perm='can_access',
name='update-tiers-rib',
perm='can_access',
post={
'description': _('Update RIB'),
'request_body': {'schema': {'application/json': TIERS_RIB_UPDATE_SCHEMA}}
'request_body': {'schema': {'application/json': TIERS_RIB_UPDATE_SCHEMA}},
},
parameters={
'CodeTiers': {'example_value': '42435'},
'IdRib': {'example_value': '4242'},
}
},
)
def update_tiers_rib(self, request, CodeTiers, IdRib, post_data):
post_data['CodeTiers'] = CodeTiers
@ -647,32 +702,33 @@ class AstreGS(BaseResource):
r = self.call('TiersRib', 'Modification', TiersRib=post_data)
return {'data': serialize_object(r)}
@endpoint(name='delete-tiers-rib', perm='can_access',
@endpoint(
name='delete-tiers-rib',
perm='can_access',
description=_('Delete RIB'),
parameters={
'CodeTiers': {'example_value': '42435'},
'IdRib': {'example_value': '4242'},
}
},
)
def delete_tiers_rib(self, request, CodeTiers, IdRib):
payload = {'CodeTiers': CodeTiers, 'IdRib': IdRib}
r = self.call('TiersRib', 'Suppression', TiersRibCle=payload)
return {'data': serialize_object(r)}
@endpoint(name='find-tiers-by-rib', perm='can_access',
@endpoint(
name='find-tiers-by-rib',
perm='can_access',
description=_('Find person by RIB'),
parameters={
'banque': {'example_value': '30001'},
'guichet': {'example_value': '00794'},
'numero_compte': {'example_value': '12345678901'},
'cle': {'example_value': '85'},
}
},
)
def find_tiers_by_rib(self, request, banque, guichet, numero_compte, cle, **kwargs):
criteres = {'banque': banque, 'guichet': guichet,
'numeroCompte': numero_compte,
'cleRIB': cle}
criteres = {'banque': banque, 'guichet': guichet, 'numeroCompte': numero_compte, 'cleRIB': cle}
# add other params to search criterias
criteres.update(kwargs)
r = self.search_tiers(criteres)

View File

@ -17,12 +17,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ATALConnector',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('description', models.TextField(verbose_name='Description')),
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
('base_soap_url', models.URLField(help_text='URL of the base SOAP endpoint', max_length=400, verbose_name='Base SOAP endpoint')),
('users', models.ManyToManyField(blank=True, related_name='_atalconnector_users_+', related_query_name='+', to='base.ApiUser')),
(
'base_soap_url',
models.URLField(
help_text='URL of the base SOAP endpoint',
max_length=400,
verbose_name='Base SOAP endpoint',
),
),
(
'users',
models.ManyToManyField(
blank=True,
related_name='_atalconnector_users_+',
related_query_name='+',
to='base.ApiUser',
),
),
],
options={
'verbose_name': 'ATAL connector',

View File

@ -43,17 +43,14 @@ def process_response(demande_number):
class ATALConnector(BaseResource):
base_soap_url = models.URLField(
max_length=400, verbose_name=_('Base SOAP endpoint'),
help_text=_('URL of the base SOAP endpoint'))
max_length=400, verbose_name=_('Base SOAP endpoint'), help_text=_('URL of the base SOAP endpoint')
)
category = _('Business Process Connectors')
class Meta:
verbose_name = _('ATAL connector')
DEMANDE_NUMBER_PARAM = {
'description': _('Demande number'),
'example_value': 'DIT18050001'
}
DEMANDE_NUMBER_PARAM = {'description': _('Demande number'), 'example_value': 'DIT18050001'}
def _soap_call(self, wsdl, method, **kwargs):
wsdl_url = urllib.parse.urljoin(self.base_soap_url, '%s?wsdl' % wsdl)
@ -96,34 +93,29 @@ class ATALConnector(BaseResource):
return self._xml_ref('VilleAgileService', 'getTypesEquipement', 'types')
@endpoint(
perm='can_access', name='insert-action-comment',
perm='can_access',
name='insert-action-comment',
post={
'description': _('Insert action comment'),
'request_body': {
'schema': {
'application/json': schemas.INSERT_ACTION_COMMENT
}
}
}
'request_body': {'schema': {'application/json': schemas.INSERT_ACTION_COMMENT}},
},
)
def insert_action_comment(self, request, post_data):
demande_number = self._soap_call(
wsdl='DemandeService', method='insertActionComment',
wsdl='DemandeService',
method='insertActionComment',
numeroDemande=post_data['numero_demande'],
commentaire=post_data['commentaire']
commentaire=post_data['commentaire'],
)
return process_response(demande_number)
@endpoint(
perm='can_access', name='insert-demande-complet-by-type',
perm='can_access',
name='insert-demande-complet-by-type',
post={
'description': _('Insert demande complet by type'),
'request_body': {
'schema': {
'application/json': schemas.INSERT_DEMANDE_COMPLET_BY_TYPE
}
}
}
'request_body': {'schema': {'application/json': schemas.INSERT_DEMANDE_COMPLET_BY_TYPE}},
},
)
def insert_demande_complet_by_type(self, request, post_data):
data = {}
@ -170,39 +162,39 @@ class ATALConnector(BaseResource):
if recv in post_data:
data[send] = post_data[recv]
demande_number = self._soap_call(
wsdl='DemandeService', method='insertDemandeCompletByType', **data
)
demande_number = self._soap_call(wsdl='DemandeService', method='insertDemandeCompletByType', **data)
return process_response(demande_number)
@endpoint(
methods=['get'], perm='can_access', example_pattern='{demande_number}/',
pattern='^(?P<demande_number>\w+)/$', name='retrieve-details-demande',
parameters={
'demande_number': DEMANDE_NUMBER_PARAM
}
methods=['get'],
perm='can_access',
example_pattern='{demande_number}/',
pattern='^(?P<demande_number>\w+)/$',
name='retrieve-details-demande',
parameters={'demande_number': DEMANDE_NUMBER_PARAM},
)
def retrieve_details_demande(self, request, demande_number):
soap_res = self._soap_call(
wsdl='DemandeService', method='retrieveDetailsDemande',
demandeNumberParam=demande_number)
wsdl='DemandeService', method='retrieveDetailsDemande', demandeNumberParam=demande_number
)
return {'data': helpers.serialize_object(soap_res)}
@endpoint(
methods=['get'], perm='can_access', example_pattern='{demande_number}/',
pattern='^(?P<demande_number>\w+)/$', name='retrieve-etat-travaux',
parameters={
'demande_number': DEMANDE_NUMBER_PARAM
}
methods=['get'],
perm='can_access',
example_pattern='{demande_number}/',
pattern='^(?P<demande_number>\w+)/$',
name='retrieve-etat-travaux',
parameters={'demande_number': DEMANDE_NUMBER_PARAM},
)
def retrieve_etat_travaux(self, request, demande_number):
soap_res = self._soap_call(
wsdl='DemandeService', method='retrieveEtatTravaux',
numero=demande_number)
soap_res = self._soap_call(wsdl='DemandeService', method='retrieveEtatTravaux', numero=demande_number)
return {'data': helpers.serialize_object(soap_res)}
@endpoint(
methods=['get'], perm='can_access', example_pattern='{demande_number}/',
methods=['get'],
perm='can_access',
example_pattern='{demande_number}/',
pattern='^(?P<demande_number>\w+)/$',
parameters={
'demande_number': DEMANDE_NUMBER_PARAM,
@ -210,14 +202,14 @@ class ATALConnector(BaseResource):
'description': _('Full'),
'example_value': 'true',
'type': 'bool',
}
}
},
},
)
def infos(self, request, demande_number, full=False):
demand_details = helpers.serialize_object(
self._soap_call(
wsdl='DemandeService', method='retrieveDetailsDemande',
demandeNumberParam=demande_number)
wsdl='DemandeService', method='retrieveDetailsDemande', demandeNumberParam=demande_number
)
)
if not demand_details:
raise APIError('Could not get a status')
@ -230,18 +222,12 @@ class ATALConnector(BaseResource):
works_comments = []
if responses:
for response in responses:
comment = {
'text': response.get('commentaires'),
'date': None
}
comment = {'text': response.get('commentaires'), 'date': None}
if 'dateReponse' in response:
comment['date'] = dateformat.format(response['dateReponse'], DATE_FORMAT)
works_comments.append(comment)
works_comment = {
'text': None,
'date': None
}
works_comment = {'text': None, 'date': None}
if works_comments:
works_comment = works_comments[-1]
@ -249,22 +235,17 @@ class ATALConnector(BaseResource):
'status': status,
'works_comment': works_comment,
'demand_details': None,
'works_comments': []
'works_comments': [],
}
if full:
data['demand_details'] = demand_details
data['works_comments'] = works_comments
if status not in ('PRISE EN COMPTE', 'ARCHIVEE'):
return {
'data': data
}
return {'data': data}
works_status = helpers.serialize_object(
self._soap_call(
wsdl='DemandeService', method='retrieveEtatTravaux',
numero=demande_number
)
self._soap_call(wsdl='DemandeService', method='retrieveEtatTravaux', numero=demande_number)
)
status = works_status.get('libelle')
if not status:
@ -277,20 +258,14 @@ class ATALConnector(BaseResource):
if full:
data['works_status'] = works_status
return {
'data': data
}
return {'data': data}
@endpoint(
perm='can_access',
post={
'description': _('Upload a file'),
'request_body': {
'schema': {
'application/json': schemas.UPLOAD
}
}
}
'request_body': {'schema': {'application/json': schemas.UPLOAD}},
},
)
def upload(self, request, post_data):
try:
@ -301,23 +276,22 @@ class ATALConnector(BaseResource):
data = {
'donneesFichier': content,
'numeroDemande': post_data['numero_demande'],
'nomFichier': post_data['nom_fichier']
'nomFichier': post_data['nom_fichier'],
}
self._soap_call(
wsdl='ChargementPiecesJointesService', method='upload',
**data
)
self._soap_call(wsdl='ChargementPiecesJointesService', method='upload', **data)
return {}
@endpoint(
methods=['get'], perm='can_access', example_pattern='{demande_number}/',
pattern='^(?P<demande_number>\w+)/$', name='new-comments',
methods=['get'],
perm='can_access',
example_pattern='{demande_number}/',
pattern='^(?P<demande_number>\w+)/$',
name='new-comments',
parameters={
'demande_number': DEMANDE_NUMBER_PARAM,
}
},
)
def new_comments(self, request, demande_number, last_datetime=None):
def issup(datetime1, datetime2):
if datetime1.tzinfo is None or datetime2.tzinfo is None:
datetime1 = datetime1.replace(tzinfo=None)
@ -331,8 +305,8 @@ class ATALConnector(BaseResource):
demand_details = helpers.serialize_object(
self._soap_call(
wsdl='DemandeService', method='retrieveDetailsDemande',
demandeNumberParam=demande_number)
wsdl='DemandeService', method='retrieveDetailsDemande', demandeNumberParam=demande_number
)
)
if not demand_details:
raise APIError('Could not get comments')
@ -340,11 +314,7 @@ class ATALConnector(BaseResource):
new_comments, all_comments, last_date = [], [], None
responses = (demand_details.get('reponses') or {}).get('Reponse') or []
for response in responses:
comment = {
'text': response.get('commentaires'),
'date': None,
'date_raw': None
}
comment = {'text': response.get('commentaires'), 'date': None, 'date_raw': None}
dateobj = None
if 'dateReponse' in response:
dateobj = response['dateReponse']
@ -356,10 +326,4 @@ class ATALConnector(BaseResource):
if dateobj and issup(dateobj, last_datetime) or last_datetime is None:
if comment not in new_comments:
new_comments.append(comment)
return {
'data': {
'new_comments': new_comments,
'all_comments': all_comments,
'last_date': last_date
}
}
return {'data': {'new_comments': new_comments, 'all_comments': all_comments, 'last_date': last_date}}

View File

@ -122,25 +122,15 @@ INSERT_DEMANDE_COMPLET_BY_TYPE = {
'demande_commentaire': {
'type': 'string',
},
'remote_adresse': {
'type': 'string'
},
'demande_mots_cles': {
'type': 'string'
},
'remote_adresse': {'type': 'string'},
'demande_mots_cles': {'type': 'string'},
'code_thematique': {
'type': 'string',
},
'code_priorite': {
'type': 'string'
},
'demande_thematique': {
'type': 'string'
},
'code_projet': {
'type': 'string'
}
}
'code_priorite': {'type': 'string'},
'demande_thematique': {'type': 'string'},
'code_projet': {'type': 'string'},
},
}
INSERT_ACTION_COMMENT = {
@ -153,8 +143,8 @@ INSERT_ACTION_COMMENT = {
},
'commentaire': {
'type': 'string',
}
}
},
},
}
UPLOAD = {
@ -169,13 +159,13 @@ UPLOAD = {
'content': {
'type': 'string',
},
}
},
},
'numero_demande': {
'type': 'string',
},
'nom_fichier': {
'type': 'string',
}
}
},
},
}

View File

@ -19,7 +19,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Link',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('name_id', models.CharField(max_length=256, verbose_name='NameID')),
('id_per', models.CharField(max_length=64, verbose_name='ID Per')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Creation date')),
@ -32,20 +35,65 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Resource',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('description', models.TextField(verbose_name='Description')),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('log_level', models.CharField(choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL'), (b'FATAL', b'FATAL')], default=b'INFO', max_length=10, verbose_name='Log Level')),
('basic_auth_username', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication username')),
('basic_auth_password', models.CharField(blank=True, max_length=128, verbose_name='Basic authentication password')),
('client_certificate', models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate')),
('trusted_certificate_authorities', models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS trusted CAs')),
(
'log_level',
models.CharField(
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
(b'FATAL', b'FATAL'),
],
default=b'INFO',
max_length=10,
verbose_name='Log Level',
),
),
(
'basic_auth_username',
models.CharField(
blank=True, max_length=128, verbose_name='Basic authentication username'
),
),
(
'basic_auth_password',
models.CharField(
blank=True, max_length=128, verbose_name='Basic authentication password'
),
),
(
'client_certificate',
models.FileField(
blank=True, null=True, upload_to=b'', verbose_name='TLS client certificate'
),
),
(
'trusted_certificate_authorities',
models.FileField(blank=True, null=True, upload_to=b'', verbose_name='TLS trusted CAs'),
),
('verify_cert', models.BooleanField(default=True, verbose_name='TLS verify certificates')),
('http_proxy', models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy')),
(
'http_proxy',
models.CharField(blank=True, max_length=128, verbose_name='HTTP and HTTPS proxy'),
),
('webservice_base_url', models.URLField(verbose_name='Webservice Base URL')),
('cod_rgp', models.CharField(default=b'RGP_PUB', max_length=64, verbose_name='Code RGP')),
('users', models.ManyToManyField(blank=True, related_name='_link_users_+', related_query_name='+', to='base.ApiUser')),
(
'users',
models.ManyToManyField(
blank=True, related_name='_link_users_+', related_query_name='+', to='base.ApiUser'
),
),
],
options={
'verbose_name': 'ATOS Genesys',

View File

@ -15,7 +15,9 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='resource',
name='client_certificate',
field=models.FileField(blank=True, null=True, upload_to='', verbose_name='TLS client certificate'),
field=models.FileField(
blank=True, null=True, upload_to='', verbose_name='TLS client certificate'
),
),
migrations.AlterField(
model_name='resource',

View File

@ -93,7 +93,7 @@ class Resource(BaseResource, HTTPResource):
continue
categories[xmlutils.text_content(code)] = {
'label': xmlutils.text_content(label),
'codifications': []
'codifications': [],
}
for codification in root.findall('CODIFICATIONS/CODIFICATIONS_ROW'):
code = codification.find('CD_CODIF')
@ -107,11 +107,15 @@ class Resource(BaseResource, HTTPResource):
if category_cod not in categories:
self.logger.warning('unknown category: %s', category_cod)
continue
categories[category_cod]['codifications'].append({
'code': xmlutils.text_content(code),
'label': xmlutils.text_content(label),
'enabled': xmlutils.text_content(in_val).strip().lower() == 'o' if in_val is not None else True,
})
categories[category_cod]['codifications'].append(
{
'code': xmlutils.text_content(code),
'label': xmlutils.text_content(label),
'enabled': xmlutils.text_content(in_val).strip().lower() == 'o'
if in_val is not None
else True,
}
)
return categories
def get_codifications(self):
@ -119,40 +123,43 @@ class Resource(BaseResource, HTTPResource):
function=self.call_select_codifications,
row=self,
key_prefix='atos-genesys-codifications',
logger=self.logger)
logger=self.logger,
)
return cache()
@endpoint(name='codifications',
description=_('List of codifications categories'))
@endpoint(name='codifications', description=_('List of codifications categories'))
def codifications(self, request):
codifications = self.get_codifications()
items = []
for code, category in codifications.items():
items.append({
'id': code,
'label': category['label'],
})
items.append(
{
'id': code,
'label': category['label'],
}
)
items.sort(key=lambda c: c['label'])
return {'data': items}
@endpoint(name='codifications',
pattern=r'^(?P<category>[\w-]+)/$',
example_pattern='{category}/',
description=_('List of codifications'),
parameters={
'category': {
'description': _('Category of codification'),
'example_value': u'MOT_APA',
}
})
@endpoint(
name='codifications',
pattern=r'^(?P<category>[\w-]+)/$',
example_pattern='{category}/',
description=_('List of codifications'),
parameters={
'category': {
'description': _('Category of codification'),
'example_value': u'MOT_APA',
}
},
)
def codifications_list(self, request, category):
codifications = self.get_codifications().get(category, {}).get('codifications', [])
items = [{
'id': codification['code'],
'text': codification['label']
} for codification in codifications]
items = [
{'id': codification['code'], 'text': codification['label']} for codification in codifications
]
return {'data': items}
def check_status(self):
@ -163,11 +170,14 @@ class Resource(BaseResource, HTTPResource):
return urlparse.urljoin(self.base_url, 'WSUsagerPublik/services/PublikService/selectAppairage')
def call_select_appairage(self, login, password, email):
row = self.xml_request(self.select_appairage_url, params={
'login': login,
'pwd': password,
'email': email,
})
row = self.xml_request(
self.select_appairage_url,
params={
'login': login,
'pwd': password,
'email': email,
},
)
row_d = xmlutils.to_json(row)
id_per = row_d.get('ID_PER', '').strip()
code = row_d.get('CD_RET', '').strip()
@ -175,72 +185,70 @@ class Resource(BaseResource, HTTPResource):
error = None
if code not in ['1', '2', '3', '4', '5', '6']:
error = 'invalid CD_RET: %s' % code,
error = ('invalid CD_RET: %s' % code,)
if code in ['2', '3', '5'] and not id_per:
error = 'missing ID_PER'
if error:
raise APIError(error, data={'response': repr(ET.tostring(row))})
return code, label, id_per
@endpoint(name='link',
methods=['post'],
description=_('Create link with an extranet account'),
perm='can_access',
parameters={
'NameID':{
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
'email': {
'description': _('Publik known email'),
'example_value': 'john.doe@example.com',
},
'login': {
'description': _('ATOS Genesys extranet login'),
'example_value': '1234',
},
'password': {
'description': _('ATOS Genesys extranet password'),
'example_value': 'password',
}
})
@endpoint(
name='link',
methods=['post'],
description=_('Create link with an extranet account'),
perm='can_access',
parameters={
'NameID': {
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
'email': {
'description': _('Publik known email'),
'example_value': 'john.doe@example.com',
},
'login': {
'description': _('ATOS Genesys extranet login'),
'example_value': '1234',
},
'password': {
'description': _('ATOS Genesys extranet password'),
'example_value': 'password',
},
},
)
def link(self, request, NameID, email, login, password):
code, label, id_per = self.call_select_appairage(login, password, email)
if code in ['2', '3', '5']:
link, created = Link.objects.get_or_create(
resource=self,
name_id=NameID,
id_per=id_per)
link, created = Link.objects.get_or_create(resource=self, name_id=NameID, id_per=id_per)
return {'link_id': link.pk, 'new': created, 'code': code, 'label': label}
elif code == '6':
raise APIError('unknown-login', data={'code': code, 'label': label})
elif code in ['4', '1']:
raise APIError('invalid-password', data={'code': code, 'label': label})
@endpoint(name='unlink',
methods=['post'],
description=_('Delete link with an extranet account'),
perm='can_access',
parameters={
'NameID':{
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
'link_id': {
'description': _('Identifier of the link'),
'example_value': '1',
},
})
@endpoint(
name='unlink',
methods=['post'],
description=_('Delete link with an extranet account'),
perm='can_access',
parameters={
'NameID': {
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
'link_id': {
'description': _('Identifier of the link'),
'example_value': '1',
},
},
)
def unlink(self, request, NameID, link_id):
try:
link_id = int(link_id.strip())
except ValueError:
raise APIError('invalid link_id')
qs = Link.objects.filter(
resource=self,
name_id=NameID,
pk=link_id)
qs = Link.objects.filter(resource=self, name_id=NameID, pk=link_id)
count = qs.count()
qs.delete()
return {'deleted': count}
@ -250,10 +258,13 @@ class Resource(BaseResource, HTTPResource):
return urlparse.urljoin(self.base_url, 'WSUsagerPublik/services/PublikService/selectUsager')
def call_select_usager(self, id_per):
row = self.xml_request(self.select_usager_url, params={
'idPer': id_per,
'codRgp': self.cod_rgp,
})
row = self.xml_request(
self.select_usager_url,
params={
'idPer': id_per,
'codRgp': self.cod_rgp,
},
)
return self._select_usager_row_to_json(row)
def _select_usager_row_to_json(self, row):
@ -275,19 +286,19 @@ class Resource(BaseResource, HTTPResource):
identification['CIVILITE'] = {'M': u'Monsieur', 'F': u'Madame'}.get(sexe, '')
return d
@endpoint(name='dossiers',
              description=_('Get data for all links'),
perm='can_access',
parameters={
'NameID':{
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
})
@endpoint(
name='dossiers',
        description=_('Get data for all links'),
perm='can_access',
parameters={
'NameID': {
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
},
)
def dossiers(self, request, NameID, link_id=None):
qs = Link.objects.filter(
resource=self,
name_id=NameID)
qs = Link.objects.filter(resource=self, name_id=NameID)
if link_id:
try:
link_id = int(link_id)
@ -300,7 +311,8 @@ class Resource(BaseResource, HTTPResource):
function=self.call_select_usager,
row=link,
key_prefix='atos-genesys-usager',
logger=self.logger)
logger=self.logger,
)
dossier = cache(link.id_per)
# build text as "id_per - prenom - no
text_parts = [str(link.id_per), '-']
@ -312,12 +324,14 @@ class Resource(BaseResource, HTTPResource):
text_parts.append(prenom.title())
if nom:
text_parts.append(nom.upper())
data.append({
'id': str(link.id),
'text': u' '.join(text_parts),
'id_per': link.id_per,
'dossier': dossier,
})
data.append(
{
'id': str(link.id),
'text': u' '.join(text_parts),
'id_per': link.id_per,
'dossier': dossier,
}
)
if link_id:
return {'data': data[0] if data else None}
return {'data': data}
@ -327,10 +341,13 @@ class Resource(BaseResource, HTTPResource):
return urlparse.urljoin(self.base_url, 'WSUsagerPublik/services/PublikService/selectUsagerByRef')
def call_select_usager_by_ref(self, ref_per):
row = self.xml_request(self.select_usager_by_ref_url, params={
'refPer': ref_per,
'codRgp': self.cod_rgp,
})
row = self.xml_request(
self.select_usager_by_ref_url,
params={
'refPer': ref_per,
'codRgp': self.cod_rgp,
},
)
return self._select_usager_row_to_json(row)
@property
@ -338,31 +355,36 @@ class Resource(BaseResource, HTTPResource):
return urlparse.urljoin(self.base_url, 'WSUsagerPublik/services/PublikService/chercheBeneficiaire')
def call_cherche_beneficiaire(self, prenom, nom, dob):
rows = self.xml_request_multiple(self.cherche_beneficiaire_url, params={
'nmPer': nom,
'prPer': prenom,
'dtNaissance': dob.strftime('%d/%m/%Y'),
})
rows = self.xml_request_multiple(
self.cherche_beneficiaire_url,
params={
'nmPer': nom,
'prPer': prenom,
'dtNaissance': dob.strftime('%d/%m/%Y'),
},
)
beneficiaires = [xmlutils.to_json(row) for row in rows]
return beneficiaires
@endpoint(name='search',
description=_('Search for beneficiaries'),
perm='can_access',
parameters={
'first_name': {
'description': _('Beneficiary first name'),
'example_value': 'John',
},
'last_name': {
'description': _('Beneficiary last name'),
'example_value': 'Doe',
},
'date_of_birth': {
'description': _('Beneficiary date of birth'),
'example_value': '1987-10-23',
}
})
@endpoint(
name='search',
description=_('Search for beneficiaries'),
perm='can_access',
parameters={
'first_name': {
'description': _('Beneficiary first name'),
'example_value': 'John',
},
'last_name': {
'description': _('Beneficiary last name'),
'example_value': 'Doe',
},
'date_of_birth': {
'description': _('Beneficiary date of birth'),
'example_value': '1987-10-23',
},
},
)
def search(self, request, first_name, last_name, date_of_birth, NameID=None, commune_naissance=None):
try:
date_of_birth = datetime.datetime.strptime(date_of_birth, '%Y-%m-%d').date()
@ -373,10 +395,7 @@ class Resource(BaseResource, HTTPResource):
if commune_naissance:
# convert commune_naissance to ASCII
commune_naissance = to_ascii(commune_naissance).lower()
beneficiaires = self.call_cherche_beneficiaire(
prenom=first_name,
nom=last_name,
dob=date_of_birth)
beneficiaires = self.call_cherche_beneficiaire(prenom=first_name, nom=last_name, dob=date_of_birth)
data = []
dossiers = []
# get dossiers of found beneficiaries
@ -410,8 +429,12 @@ class Resource(BaseResource, HTTPResource):
if commune_naissance:
cmu_nais = to_ascii(identification.get('CMU_NAIS', '')).lower()
if cmu_nais and commune_naissance != cmu_nais:
self.logger.debug(u'id_per %s: CMU_NAIS(%s) does not match commune_naissance(%s)',
id_per, cmu_nais, commune_naissance)
self.logger.debug(
u'id_per %s: CMU_NAIS(%s) does not match commune_naissance(%s)',
id_per,
cmu_nais,
commune_naissance,
)
continue
dossiers.append(dossier)
@ -431,38 +454,41 @@ class Resource(BaseResource, HTTPResource):
tel2 = ''.join(c for c in identification.get('TEL_FIXE', '') if is_number(c))
email = identification.get('MAIL', '').strip()
if tel1 and tel1[:2] in ('06', '07'):
data.append({
'id': 'tel1',
'text': 'par SMS vers ' + tel1[:2] + '*****' + tel1[-3:],
'phone': tel1,
'id_per': id_per,
'nom': nom,
'prenom': prenom,
'nom_naissance': nom_naissance,
})
data.append(
{
'id': 'tel1',
'text': 'par SMS vers ' + tel1[:2] + '*****' + tel1[-3:],
'phone': tel1,
'id_per': id_per,
'nom': nom,
'prenom': prenom,
'nom_naissance': nom_naissance,
}
)
if tel2 and tel2[:2] in ('06', '07'):
data.append({
'id': 'tel2',
'text': 'par SMS vers ' + tel2[:2] + '*****' + tel2[-3:],
'phone': tel2,
'id_per': id_per,
'nom': nom,
'prenom': prenom,
'nom_naissance': nom_naissance,
})
data.append(
{
'id': 'tel2',
'text': 'par SMS vers ' + tel2[:2] + '*****' + tel2[-3:],
'phone': tel2,
'id_per': id_per,
'nom': nom,
'prenom': prenom,
'nom_naissance': nom_naissance,
}
)
if email:
data.append({
'id': 'email1',
'text': 'par courriel vers ' + email[:2] + '***@***' + email[-3:],
'email': email,
'id_per': id_per,
'nom': nom,
'prenom': prenom,
'nom_naissance': nom_naissance,
})
data.append(
{
'id': 'email1',
'text': 'par courriel vers ' + email[:2] + '***@***' + email[-3:],
'email': email,
'id_per': id_per,
'nom': nom,
'prenom': prenom,
'nom_naissance': nom_naissance,
}
)
if len(data) == 0:
self.logger.debug('id_per %s: no contact information, ignored', id_per)
raise APIError('no-contacts')
@ -476,50 +502,39 @@ class Resource(BaseResource, HTTPResource):
'link_id': link and link.id,
}
@endpoint(name='link-by-id-per',
methods=['post'],
description=_('Create link with an extranet account'),
perm='can_access',
parameters={
'NameID': {
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
'id_per': {
'description': _('ATOS Genesys ID_PER'),
'example_value': '767676',
}
})
@endpoint(
name='link-by-id-per',
methods=['post'],
description=_('Create link with an extranet account'),
perm='can_access',
parameters={
'NameID': {
'description': _('Publik NameID'),
'example_value': 'xyz24d934',
},
'id_per': {
'description': _('ATOS Genesys ID_PER'),
'example_value': '767676',
},
},
)
def link_by_id_per(self, request, NameID, id_per):
dossier = self.call_select_usager(id_per)
link, created = Link.objects.get_or_create(
resource=self,
name_id=NameID,
id_per=id_per)
link, created = Link.objects.get_or_create(resource=self, name_id=NameID, id_per=id_per)
return {'link_id': link.pk, 'new': created}
class Link(models.Model):
resource = models.ForeignKey(
Resource,
on_delete=models.CASCADE)
name_id = models.CharField(
verbose_name=_('NameID'),
blank=False,
max_length=256)
id_per = models.CharField(
verbose_name=_('ID Per'),
blank=False,
max_length=64)
created = models.DateTimeField(
verbose_name=_('Creation date'),
auto_now_add=True)
extra = JSONField(
verbose_name=_('Anything'),
null=True)
resource = models.ForeignKey(Resource, on_delete=models.CASCADE)
name_id = models.CharField(verbose_name=_('NameID'), blank=False, max_length=256)
id_per = models.CharField(verbose_name=_('ID Per'), blank=False, max_length=64)
created = models.DateTimeField(verbose_name=_('Creation date'), auto_now_add=True)
extra = JSONField(verbose_name=_('Anything'), null=True)
class Meta:
unique_together = (
'resource', 'name_id', 'id_per',
'resource',
'name_id',
'id_per',
)
ordering = ['created']

View File

@ -25,11 +25,12 @@ def row_lock(row):
class RowLockedCache(object):
'''Cache return value of a function, always return the cached value for
performance but if the cache is stale update it asynchronously using
a thread, prevent multiple update using row locks on database models and
an update cache key.
'''
"""Cache return value of a function, always return the cached value for
performance but if the cache is stale update it asynchronously using
a thread, prevent multiple update using row locks on database models and
an update cache key.
"""
def __init__(self, function, logger=None, row=None, duration=DEFAULT_DURATION, key_prefix=None):
self.function = function
self.row = row

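The docstring describes the caching strategy; the call sites in the ATOS Genesys connector earlier in this changeset show how it is meant to be used. A short usage sketch, with arguments taken from those call sites (the refresh delay is assumed to fall back to DEFAULT_DURATION when duration is not given):

# wrap the expensive remote call; the Link row is used for the database lock
cache = RowLockedCache(
    function=self.call_select_usager,
    row=link,
    key_prefix='atos-genesys-usager',
    logger=self.logger,
)
# the cached value is returned immediately; if it is stale, a background
# thread refreshes it while the row lock keeps other workers from doing the same
dossier = cache(link.id_per)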
View File

@ -14,12 +14,28 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='BaseAddresse',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('service_url', models.CharField(help_text='Base Adresse Web Service URL', max_length=128, verbose_name='Service URL')),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_baseaddresse_users_+', related_query_name='+', blank=True)),
(
'service_url',
models.CharField(
help_text='Base Adresse Web Service URL', max_length=128, verbose_name='Service URL'
),
),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_baseaddresse_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'verbose_name': 'Base Adresse Web Service',

View File

@ -14,7 +14,12 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='baseaddresse',
name='service_url',
field=models.CharField(default=b'https://api-adresse.data.gouv.fr/', help_text='Base Adresse Web Service URL', max_length=128, verbose_name='Service URL'),
field=models.CharField(
default=b'https://api-adresse.data.gouv.fr/',
help_text='Base Adresse Web Service URL',
max_length=128,
verbose_name='Service URL',
),
preserve_default=True,
),
]

View File

@ -14,7 +14,13 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='baseaddresse',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Debug Enabled',
blank=True,
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
]

View File

@ -14,7 +14,12 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='baseaddresse',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
]

View File

@ -14,7 +14,19 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='baseaddresse',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
preserve_default=True,
),
]

View File

@ -10,6 +10,4 @@ class Migration(migrations.Migration):
('base_adresse', '0005_auto_20160407_0456'),
]
operations = [
migrations.RenameModel('BaseAddresse', 'BaseAdresse')
]
operations = [migrations.RenameModel('BaseAddresse', 'BaseAdresse')]

View File

@ -14,7 +14,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='StreetModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('city', models.CharField(max_length=100, verbose_name='City')),
('name', models.CharField(max_length=150, verbose_name='Street name')),
('zipcode', models.CharField(max_length=5, verbose_name='Postal code')),
@ -26,7 +29,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='UpdateStreetModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('zipcode', models.CharField(max_length=5, verbose_name='Postal code')),
('start_time', models.DateTimeField(null=True, verbose_name='Start of update')),
('end_time', models.DateTimeField(null=True, verbose_name='End of update')),
@ -35,6 +41,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='baseadresse',
name='zipcode',
field=models.CharField(max_length=5, verbose_name='Postal codes to get streets, separated with commas', blank=True),
field=models.CharField(
max_length=5, verbose_name='Postal codes to get streets, separated with commas', blank=True
),
),
]

View File

@ -15,6 +15,10 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='baseadresse',
name='zipcode',
field=models.CharField(blank=True, max_length=600, verbose_name='Postal codes or county number to get streets, separated with commas'),
field=models.CharField(
blank=True,
max_length=600,
verbose_name='Postal codes or county number to get streets, separated with commas',
),
),
]

View File

@ -17,9 +17,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CityModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('name', models.CharField(max_length=150, verbose_name='City name')),
('unaccent_name', models.CharField(max_length=150, null=True, verbose_name='City name ascii char')),
(
'unaccent_name',
models.CharField(max_length=150, null=True, verbose_name='City name ascii char'),
),
('code', models.CharField(max_length=5, verbose_name='INSEE code')),
('zipcode', models.CharField(max_length=5, verbose_name='Postal code')),
('population', models.PositiveIntegerField(verbose_name='Population')),
@ -33,9 +39,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='DepartmentModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('name', models.CharField(max_length=100, verbose_name='Department name')),
('unaccent_name', models.CharField(max_length=150, null=True, verbose_name='Department name ascii char')),
(
'unaccent_name',
models.CharField(max_length=150, null=True, verbose_name='Department name ascii char'),
),
('code', models.CharField(max_length=3, unique=True, verbose_name='Department code')),
('last_update', models.DateTimeField(auto_now=True, null=True, verbose_name='Last update')),
],
@ -47,9 +59,15 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='RegionModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('name', models.CharField(max_length=150, verbose_name='Region name')),
('unaccent_name', models.CharField(max_length=150, null=True, verbose_name='Region name ascii char')),
(
'unaccent_name',
models.CharField(max_length=150, null=True, verbose_name='Region name ascii char'),
),
('code', models.CharField(max_length=2, unique=True, verbose_name='Region code')),
('last_update', models.DateTimeField(auto_now=True, null=True, verbose_name='Last update')),
],
@ -61,12 +79,21 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='baseadresse',
name='api_geo_url',
field=models.CharField(default=b'https://geo.api.gouv.fr/', help_text='Base Adresse API Geo URL', max_length=128, verbose_name='API Geo URL'),
field=models.CharField(
default=b'https://geo.api.gouv.fr/',
help_text='Base Adresse API Geo URL',
max_length=128,
verbose_name='API Geo URL',
),
),
migrations.AlterField(
model_name='baseadresse',
name='zipcode',
field=models.CharField(blank=True, max_length=600, verbose_name='Postal codes or department number to get streets, separated with commas'),
field=models.CharField(
blank=True,
max_length=600,
verbose_name='Postal codes or department number to get streets, separated with commas',
),
),
migrations.AlterField(
model_name='streetmodel',
@ -76,17 +103,29 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='departmentmodel',
name='region',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base_adresse.RegionModel'),
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='base_adresse.RegionModel'
),
),
migrations.AddField(
model_name='citymodel',
name='department',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='base_adresse.DepartmentModel'),
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to='base_adresse.DepartmentModel',
),
),
migrations.AddField(
model_name='citymodel',
name='region',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='base_adresse.RegionModel'),
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to='base_adresse.RegionModel',
),
),
migrations.AlterUniqueTogether(
name='citymodel',

View File

@ -16,7 +16,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='AddressCacheModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('api_id', models.CharField(max_length=30, unique=True)),
('data', django.contrib.postgres.fields.jsonb.JSONField(default=dict)),
('timestamp', models.DateTimeField(auto_now=True)),
@ -25,11 +28,21 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='baseadresse',
name='latitude',
field=models.FloatField(blank=True, help_text='Geographic priority for /addresses/ endpoint.', null=True, verbose_name='Latitude'),
field=models.FloatField(
blank=True,
help_text='Geographic priority for /addresses/ endpoint.',
null=True,
verbose_name='Latitude',
),
),
migrations.AddField(
model_name='baseadresse',
name='longitude',
field=models.FloatField(blank=True, help_text='Geographic priority for /addresses/ endpoint.', null=True, verbose_name='Longitude'),
field=models.FloatField(
blank=True,
help_text='Geographic priority for /addresses/ endpoint.',
null=True,
verbose_name='Longitude',
),
),
]

View File

@ -21,11 +21,21 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='baseadresse',
name='api_geo_url',
field=models.CharField(default='https://geo.api.gouv.fr/', help_text='Base Adresse API Geo URL', max_length=128, verbose_name='API Geo URL'),
field=models.CharField(
default='https://geo.api.gouv.fr/',
help_text='Base Adresse API Geo URL',
max_length=128,
verbose_name='API Geo URL',
),
),
migrations.AlterField(
model_name='baseadresse',
name='service_url',
field=models.CharField(default='https://api-adresse.data.gouv.fr/', help_text='Base Adresse Web Service URL', max_length=128, verbose_name='Service URL'),
field=models.CharField(
default='https://api-adresse.data.gouv.fr/',
help_text='Base Adresse Web Service URL',
max_length=128,
verbose_name='Service URL',
),
),
]

View File

@ -22,16 +22,20 @@ from passerelle.utils.jsonresponse import APIError
class BaseAdresse(BaseResource):
service_url = models.CharField(
max_length=128, blank=False,
max_length=128,
blank=False,
default='https://api-adresse.data.gouv.fr/',
verbose_name=_('Service URL'),
help_text=_('Base Adresse Web Service URL'))
help_text=_('Base Adresse Web Service URL'),
)
api_geo_url = models.CharField(
max_length=128, blank=False,
max_length=128,
blank=False,
default='https://geo.api.gouv.fr/',
verbose_name=_('API Geo URL'),
help_text=_('Base Adresse API Geo URL'))
help_text=_('Base Adresse API Geo URL'),
)
category = _('Geographic information system')
@ -46,15 +50,18 @@ class BaseAdresse(BaseResource):
zipcode = models.CharField(
max_length=600,
blank=True,
verbose_name=_('Postal codes or department number to get streets, separated with commas'))
verbose_name=_('Postal codes or department number to get streets, separated with commas'),
)
latitude = models.FloatField(
null=True, blank=True,
null=True,
blank=True,
verbose_name=_('Latitude'),
help_text=_('Geographic priority for /addresses/ endpoint.'),
)
longitude = models.FloatField(
null=True, blank=True,
null=True,
blank=True,
verbose_name=_('Longitude'),
help_text=_('Geographic priority for /addresses/ endpoint.'),
)
@ -78,29 +85,38 @@ class BaseAdresse(BaseResource):
elif prop == 'name':
house_number = data['properties'].get('housenumber')
if house_number and value.startswith(house_number):
value = value[len(house_number):].strip()
value = value[len(house_number) :].strip()
result['address']['road'] = value
elif prop == 'id':
result['id'] = value
return result
@endpoint(pattern='(?P<q>.+)?$',
description=_('Addresses list'),
parameters={
'id': {'description': _('Address identifier')},
'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'},
'page_limit': {'description': _('Maximum number of results to return. Must be '
'lower than 20.')},
'zipcode': {'description': _('Zipcode'), 'example_value': '75014'},
'citycode': {'description': _('INSEE City code')},
'lat': {'description': _('Prioritize results according to coordinates. "lon" '
'parameter must also be present.')},
'lon': {'description': _('Prioritize results according to coordinates. "lat" '
'parameter must also be present.')},
})
def addresses(self, request, id=None, q=None,
zipcode='', citycode=None,
lat=None, lon=None, page_limit=5):
@endpoint(
pattern='(?P<q>.+)?$',
description=_('Addresses list'),
parameters={
'id': {'description': _('Address identifier')},
'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'},
'page_limit': {
'description': _('Maximum number of results to return. Must be ' 'lower than 20.')
},
'zipcode': {'description': _('Zipcode'), 'example_value': '75014'},
'citycode': {'description': _('INSEE City code')},
'lat': {
'description': _(
'Prioritize results according to coordinates. "lon" ' 'parameter must also be present.'
)
},
'lon': {
'description': _(
'Prioritize results according to coordinates. "lat" ' 'parameter must also be present.'
)
},
},
)
def addresses(
self, request, id=None, q=None, zipcode='', citycode=None, lat=None, lon=None, page_limit=5
):
if id is not None:
try:
address = AddressCacheModel.objects.get(api_id=id)
@ -145,34 +161,47 @@ class BaseAdresse(BaseResource):
data = self.format_address_data(feature)
result.append(data)
address, created = AddressCacheModel.objects.get_or_create(
api_id=data['id'], defaults={'data': data})
api_id=data['id'], defaults={'data': data}
)
if not created:
address.update_timestamp()
return {'data': result}
@endpoint(pattern='(?P<q>.+)?$', description=_('Geocoding (Nominatim API)'),
parameters={
'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'},
'zipcode': {'description': _('Zipcode')},
'citycode': {'description': _('INSEE City code')},
'lat': {'description': _('Prioritize results according to coordinates. "lat" '
'parameter must be present.')},
'lon': {'description': _('Prioritize results according to coordinates. "lon" '
'parameter must be present.')},
})
@endpoint(
pattern='(?P<q>.+)?$',
description=_('Geocoding (Nominatim API)'),
parameters={
'q': {'description': _('Address'), 'example_value': '169 rue du chateau, paris'},
'zipcode': {'description': _('Zipcode')},
'citycode': {'description': _('INSEE City code')},
'lat': {
'description': _(
'Prioritize results according to coordinates. "lat" ' 'parameter must be present.'
)
},
'lon': {
'description': _(
'Prioritize results according to coordinates. "lon" ' 'parameter must be present.'
)
},
},
)
def search(self, request, q, zipcode='', citycode=None, lat=None, lon=None, **kwargs):
if kwargs.get('format', 'json') != 'json':
raise NotImplementedError()
result = self.addresses(request, q=q, zipcode=zipcode, citycode=citycode,
lat=lat, lon=lon, page_limit=1)
result = self.addresses(
request, q=q, zipcode=zipcode, citycode=citycode, lat=lat, lon=lon, page_limit=1
)
return result['data']
@endpoint(description=_('Reverse geocoding'),
parameters={
'lat': {'description': _('Latitude'), 'example_value': 48.833708},
'lon': {'description': _('Longitude'), 'example_value': 2.323349},
})
@endpoint(
description=_('Reverse geocoding'),
parameters={
'lat': {'description': _('Latitude'), 'example_value': 48.833708},
'lon': {'description': _('Longitude'), 'example_value': 2.323349},
},
)
def reverse(self, request, lat, lon, **kwargs):
if kwargs.get('format', 'json') != 'json':
raise NotImplementedError()
@ -196,18 +225,18 @@ class BaseAdresse(BaseResource):
break
return result
@endpoint(description=_('Streets from zipcode'),
parameters={
'id': {'description': _('Street identifier')},
'q': {'description': _("Street name")},
'zipcode': {'description': _('Zipcode')},
'citycode': {'description': _('INSEE City code')},
'page_limit': {'description': _('Maximum number of results to return'),
'example_value': 30},
'distinct': {'description': _('Remove duplicate streets')},
})
def streets(self, request, zipcode=None, citycode=None,
q=None, id=None, distinct=True, page_limit=None):
@endpoint(
description=_('Streets from zipcode'),
parameters={
'id': {'description': _('Street identifier')},
'q': {'description': _("Street name")},
'zipcode': {'description': _('Zipcode')},
'citycode': {'description': _('INSEE City code')},
'page_limit': {'description': _('Maximum number of results to return'), 'example_value': 30},
'distinct': {'description': _('Remove duplicate streets')},
},
)
def streets(self, request, zipcode=None, citycode=None, q=None, id=None, distinct=True, page_limit=None):
result = []
if id is not None:
try:
@ -234,29 +263,38 @@ class BaseAdresse(BaseResource):
streets = streets[:page_limit]
for street in streets:
result.append({'id': str(street.id),
'text': street.name,
'type': street.type,
'city': street.city,
'citycode': street.citycode,
'zipcode': street.zipcode})
result.append(
{
'id': str(street.id),
'text': street.name,
'type': street.type,
'city': street.city,
'citycode': street.citycode,
'zipcode': street.zipcode,
}
)
return {'data': result}
@endpoint(description=_('Cities list'),
parameters={
'id': {'description': _('Get exactly one city using its code and postal code '
'separated with a dot'),
'example_value': '75056.75014'},
'q': {'description': _("Search text in name or postal code"),
'example_value': 'Paris'},
'code': {'description': _('INSEE code (or multiple codes separated with commas)'),
'example_value': '75056'},
'region_code': {'description': _('Region code'), 'example_value': '11'},
'department_code': {'description': _('Department code'), 'example_value': '75'},
})
def cities(self, request, id=None, q=None, code=None, region_code=None,
department_code=None):
@endpoint(
description=_('Cities list'),
parameters={
'id': {
'description': _(
'Get exactly one city using its code and postal code ' 'separated with a dot'
),
'example_value': '75056.75014',
},
'q': {'description': _("Search text in name or postal code"), 'example_value': 'Paris'},
'code': {
'description': _('INSEE code (or multiple codes separated with commas)'),
'example_value': '75056',
},
'region_code': {'description': _('Region code'), 'example_value': '11'},
'department_code': {'description': _('Department code'), 'example_value': '75'},
},
)
def cities(self, request, id=None, q=None, code=None, region_code=None, department_code=None):
cities = CityModel.objects.all()
if id is not None:
@ -267,8 +305,9 @@ class BaseAdresse(BaseResource):
cities = cities.filter(code=code, zipcode=zipcode)
if q:
unaccented_q = simplify(q)
cities = cities.filter(Q(unaccent_name__istartswith=unaccented_q) |
Q(zipcode__istartswith=unaccented_q))
cities = cities.filter(
Q(unaccent_name__istartswith=unaccented_q) | Q(zipcode__istartswith=unaccented_q)
)
if code:
if ',' in code:
codes = [c.strip() for c in code.split(',')]
@ -283,13 +322,14 @@ class BaseAdresse(BaseResource):
cities = cities.select_related('department', 'region')
return {'data': [city.to_json() for city in cities]}
@endpoint(description=_('Departments list'),
parameters={
'id': {'description': _('Get exactly one department using its code'),
'example_value': '59'},
'q': {'description': _('Search text in name or code'), 'example_value': 'Nord'},
'region_code': {'description': _('Region code'), 'example_value': '32'},
})
@endpoint(
description=_('Departments list'),
parameters={
'id': {'description': _('Get exactly one department using its code'), 'example_value': '59'},
'q': {'description': _('Search text in name or code'), 'example_value': 'Nord'},
'region_code': {'description': _('Region code'), 'example_value': '32'},
},
)
def departments(self, request, id=None, q=None, region_code=None):
departments = DepartmentModel.objects.all()
@ -297,21 +337,22 @@ class BaseAdresse(BaseResource):
departments = departments.filter(code=id)
if q:
unaccented_q = simplify(q)
departments = departments.filter(Q(unaccent_name__istartswith=unaccented_q) |
Q(code__istartswith=unaccented_q))
departments = departments.filter(
Q(unaccent_name__istartswith=unaccented_q) | Q(code__istartswith=unaccented_q)
)
if region_code:
departments = departments.filter(region__code=region_code)
departments = departments.select_related('region')
return {'data': [department.to_json() for department in departments]}
@endpoint(description=_('Regions list'),
parameters={
'id': {'description': _('Get exactly one region using its code'),
'example_value': '32'},
'q': {'description': _('Search text in name or code'),
'example_value': 'Hauts-de-France'},
})
@endpoint(
description=_('Regions list'),
parameters={
'id': {'description': _('Get exactly one region using its code'), 'example_value': '32'},
'q': {'description': _('Search text in name or code'), 'example_value': 'Hauts-de-France'},
},
)
def regions(self, request, id=None, q=None):
regions = RegionModel.objects.all()
@ -319,8 +360,9 @@ class BaseAdresse(BaseResource):
regions = regions.filter(code=id)
if q:
unaccented_q = simplify(q)
regions = regions.filter(Q(unaccent_name__istartswith=unaccented_q) |
Q(code__istartswith=unaccented_q))
regions = regions.filter(
Q(unaccent_name__istartswith=unaccented_q) | Q(code__istartswith=unaccented_q)
)
return {'data': [region.to_json() for region in regions]}
@ -362,7 +404,10 @@ class BaseAdresse(BaseResource):
for department in departments:
ban_gz = self.requests.get(
'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-{}.ndjson.gz'.format(department))
'https://adresse.data.gouv.fr/data/ban/adresses/latest/addok/adresses-addok-{}.ndjson.gz'.format(
department
)
)
if ban_gz.status_code != 200:
continue
@ -386,7 +431,8 @@ class BaseAdresse(BaseResource):
'city': street_info['city'],
'zipcode': street_info['postcode'],
'type': street_info['type'],
})
},
)
if line is _not_found:
raise Exception('bano file is empty')
@ -409,8 +455,7 @@ class BaseAdresse(BaseResource):
except ValueError:
error = 'invalid json, got: %s' % response.text
if error:
self.logger.error('failed to update api geo data for endpoint %s: %s',
endpoint, error)
self.logger.error('failed to update api geo data for endpoint %s: %s', endpoint, error)
return
if not result:
raise Exception('api geo returns empty json')
@ -449,8 +494,7 @@ class BaseAdresse(BaseResource):
defaults['department'] = DepartmentModel.objects.get(code=data['codeDepartement'])
if data.get('codeRegion'):
defaults['region'] = RegionModel.objects.get(code=data['codeRegion'])
CityModel.objects.update_or_create(
code=data['code'], zipcode=zipcode, defaults=defaults)
CityModel.objects.update_or_create(code=data['code'], zipcode=zipcode, defaults=defaults)
CityModel.objects.filter(last_update__lt=start_update).delete()
def clean_addresses_cache(self):
@ -478,7 +522,6 @@ class BaseAdresse(BaseResource):
class UnaccentNameMixin(object):
def save(self, *args, **kwargs):
self.unaccent_name = simplify(self.name)
super(UnaccentNameMixin, self).save(*args, **kwargs)

View File

@ -6,4 +6,5 @@ from passerelle.apps.bdp.models import Bdp
class BdpAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
admin.site.register(Bdp, BdpAdmin)

View File

@ -14,16 +14,41 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Bdp',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('service_url', models.CharField(help_text='BDP Web Service URL', max_length=128, verbose_name='Service URL')),
(
'service_url',
models.CharField(
help_text='BDP Web Service URL', max_length=128, verbose_name='Service URL'
),
),
('username', models.CharField(max_length=128, verbose_name='Username', blank=True)),
('password', models.CharField(max_length=128, verbose_name='Password', blank=True)),
('verify_cert', models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity')),
('keystore', models.FileField(help_text='Certificate and private key in PEM format', upload_to=b'bdp', null=True, verbose_name='Keystore', blank=True)),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_bdp_users_+', related_query_name='+', blank=True)),
(
'verify_cert',
models.BooleanField(default=True, verbose_name='Check HTTPS Certificate validity'),
),
(
'keystore',
models.FileField(
help_text='Certificate and private key in PEM format',
upload_to=b'bdp',
null=True,
verbose_name='Keystore',
blank=True,
),
),
(
'users',
models.ManyToManyField(
to='base.ApiUser', related_name='_bdp_users_+', related_query_name='+', blank=True
),
),
],
options={
'verbose_name': 'BDP Web Service',

View File

@ -14,7 +14,13 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='bdp',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Debug Enabled',
blank=True,
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
]

View File

@ -14,7 +14,12 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='bdp',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
]

View File

@ -14,7 +14,19 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='bdp',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
preserve_default=True,
),
]

View File

@ -8,20 +8,21 @@ from django.utils.translation import ugettext_lazy as _
from passerelle.base.models import BaseResource
class Bdp(BaseResource):
service_url = models.CharField(max_length=128, blank=False,
verbose_name=_('Service URL'),
help_text=_('BDP Web Service URL'))
username = models.CharField(max_length=128, blank=True,
verbose_name=_('Username'))
password = models.CharField(max_length=128, blank=True,
verbose_name=_('Password'))
verify_cert = models.BooleanField(default=True,
verbose_name=_('Check HTTPS Certificate validity'))
keystore = models.FileField(upload_to='bdp',
blank=True, null=True,
verbose_name=_('Keystore'),
help_text=_('Certificate and private key in PEM format'))
service_url = models.CharField(
max_length=128, blank=False, verbose_name=_('Service URL'), help_text=_('BDP Web Service URL')
)
username = models.CharField(max_length=128, blank=True, verbose_name=_('Username'))
password = models.CharField(max_length=128, blank=True, verbose_name=_('Password'))
verify_cert = models.BooleanField(default=True, verbose_name=_('Check HTTPS Certificate validity'))
keystore = models.FileField(
upload_to='bdp',
blank=True,
null=True,
verbose_name=_('Keystore'),
help_text=_('Certificate and private key in PEM format'),
)
category = _('Business Process Connectors')
@ -40,16 +41,13 @@ class Bdp(BaseResource):
def get_api(self, endpoint, **params):
options = self.requests_options()
return requests.get(self.service_url + '/api/' + endpoint,
params=params, **options).json()
return requests.get(self.service_url + '/api/' + endpoint, params=params, **options).json()
def post_api(self, endpoint, obj):
data = json.dumps(obj)
headers = {'Content-Type': 'application/json'}
options = self.requests_options()
request = requests.post(
self.service_url + '/api/' + endpoint,
data=data, headers=headers, **options)
request = requests.post(self.service_url + '/api/' + endpoint, data=data, headers=headers, **options)
result = {
'status_code': request.status_code,
'x_request_id': request.headers.get('x-request-id'),

View File

@ -5,5 +5,9 @@ from .views import BdpDetailView, ResourcesView, PostAdherentView
urlpatterns = [
url(r'^(?P<slug>[\w,-]+)/$', BdpDetailView.as_view(), name='bdp-view'),
url(r'^(?P<slug>[\w,-]+)/(?P<resources>[\w,-]+)/$', ResourcesView.as_view(), name='bdp-resources'),
url(r'^(?P<slug>[\w,-]+)/post/adherent/$', csrf_exempt(PostAdherentView.as_view()), name='bdp-post-adherent'),
url(
r'^(?P<slug>[\w,-]+)/post/adherent/$',
csrf_exempt(PostAdherentView.as_view()),
name='bdp-post-adherent',
),
]

View File

@ -40,16 +40,20 @@ class PostAdherentView(View, SingleObjectMixin):
@utils.protected_api('can_access')
@utils.to_json()
def post(self, request, *args, **kwargs):
data = json_loads(request.body) # JSON w.c.s. formdata
data = json_loads(request.body) # JSON w.c.s. formdata
date_de_naissance = data['fields'].get('date_de_naissance')
# force 1973-04-18T00:00:00Z
date_de_naissance = date_de_naissance[:10] + 'T00:00:00Z'
abonnements = data['fields'].get('abonnements_raw') or \
data['fields'].get('abonnements_raw') or \
request.GET.get('abonnements')
bibliotheque_id = data['fields'].get('bibliotheque_raw') or \
data['fields'].get('bibliotheque') or \
request.GET.get('bibliotheque')
abonnements = (
data['fields'].get('abonnements_raw')
or data['fields'].get('abonnements_raw')
or request.GET.get('abonnements')
)
bibliotheque_id = (
data['fields'].get('bibliotheque_raw')
or data['fields'].get('bibliotheque')
or request.GET.get('bibliotheque')
)
adherent = {
'nom': data['fields'].get('nom'),
'prenom': data['fields'].get('prenom'),

View File

@ -18,11 +18,19 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CartaDSCS',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('description', models.TextField(verbose_name='Description')),
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
('wsdl_base_url', models.URLField(help_text='ex: https://example.net/adscs/webservices/', verbose_name='WSDL Base URL')),
(
'wsdl_base_url',
models.URLField(
help_text='ex: https://example.net/adscs/webservices/', verbose_name='WSDL Base URL'
),
),
('username', models.CharField(max_length=64, verbose_name='Username')),
('password', models.CharField(max_length=64, verbose_name='Password')),
('iv', models.CharField(max_length=16, verbose_name='Initialisation Vector')),
@ -31,7 +39,15 @@ class Migration(migrations.Migration):
('ftp_username', models.CharField(max_length=64, verbose_name='FTP Username')),
('ftp_password', models.CharField(max_length=64, verbose_name='FTP Password')),
('ftp_client_name', models.CharField(max_length=64, verbose_name='FTP Client Name')),
('users', models.ManyToManyField(blank=True, related_name='_cartadscs_users_+', related_query_name='+', to='base.ApiUser')),
(
'users',
models.ManyToManyField(
blank=True,
related_name='_cartadscs_users_+',
related_query_name='+',
to='base.ApiUser',
),
),
],
options={
'verbose_name': 'Cart@DS CS',
@ -40,7 +56,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CartaDSDossier',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('email', models.CharField(max_length=256)),
('tracking_code', models.CharField(max_length=20)),
('commune_id', models.CharField(max_length=20)),
@ -59,10 +78,16 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CartaDSFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('tracking_code', models.CharField(max_length=20)),
('id_piece', models.CharField(max_length=20)),
('uploaded_file', models.FileField(upload_to=passerelle.apps.cartads_cs.models.cartads_file_location)),
(
'uploaded_file',
models.FileField(upload_to=passerelle.apps.cartads_cs.models.cartads_file_location),
),
('last_update_datetime', models.DateTimeField(auto_now=True)),
],
),

View File

@ -16,7 +16,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CartaDSDataCache',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('data_type', models.CharField(max_length=50)),
('data_parameters', django.contrib.postgres.fields.jsonb.JSONField(default={})),
('data_values', django.contrib.postgres.fields.jsonb.JSONField(default={})),

View File

@ -15,6 +15,12 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='cartadscs',
name='client_name',
field=models.CharField(blank=True, help_text='Only useful in shared environments.', max_length=64, null=True, verbose_name='Client Name'),
field=models.CharField(
blank=True,
help_text='Only useful in shared environments.',
max_length=64,
null=True,
verbose_name='Client Name',
),
),
]

View File

@ -15,7 +15,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CartaDSSubscriber',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('name_id', models.CharField(max_length=32, null=True)),
],
),

File diff suppressed because it is too large

View File

@ -14,7 +14,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ChoositRegisterGateway',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
@ -30,7 +33,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ChoositRegisterNewsletter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('name', models.CharField(max_length=16)),
('description', models.CharField(max_length=128, blank=True)),
],
@ -43,13 +49,27 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ChoositSMSGateway',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('key', models.CharField(max_length=64, verbose_name='Key')),
('default_country_code', models.CharField(default='33', max_length=3, verbose_name='Default country code')),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_choositsmsgateway_users_+', related_query_name='+', blank=True)),
(
'default_country_code',
models.CharField(default='33', max_length=3, verbose_name='Default country code'),
),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_choositsmsgateway_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'db_table': 'sms_choosit',

View File

@ -14,13 +14,25 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='choositregistergateway',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Debug Enabled',
blank=True,
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
migrations.AddField(
model_name='choositsmsgateway',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Debug Enabled',
blank=True,
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
]

View File

@ -14,13 +14,23 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='choositregistergateway',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
migrations.AlterField(
model_name='choositsmsgateway',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
]

View File

@ -14,13 +14,37 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='choositregistergateway',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
preserve_default=True,
),
migrations.AlterField(
model_name='choositsmsgateway',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
preserve_default=True,
),
]

View File

@ -26,8 +26,8 @@ class ChoositSMSGateway(SMSResource):
'data': [
[u'0033688888888', u'Choosit error: bad JSON response'],
[u'0033677777777', u'Choosit error: bad JSON response'],
]
}
],
},
},
{
'response': {
@ -40,7 +40,7 @@ class ChoositSMSGateway(SMSResource):
[u'0033688888888', u'Choosit error: not ok'],
[u'0033677777777', u'Choosit error: not ok'],
],
}
},
},
{
'response': {
@ -53,9 +53,8 @@ class ChoositSMSGateway(SMSResource):
[u'0033688888888', {'result': u'Envoi terminé', 'sms_id': 1234}],
[u'0033677777777', {'result': u'Envoi terminé', 'sms_id': 1234}],
],
}
}
},
},
],
}
URL = 'http://sms.choosit.com/webservice'
@ -97,6 +96,5 @@ class ChoositSMSGateway(SMSResource):
else:
results.append(output)
if any(isinstance(result, string_types) for result in results):
raise APIError('Choosit error: some destinations failed',
data=list(zip(destinations, results)))
raise APIError('Choosit error: some destinations failed', data=list(zip(destinations, results)))
return list(zip(destinations, results))

View File

@ -32,37 +32,30 @@ CERTIFICATE_TYPES = [
{"id": "NAI", "text": "Naissance"},
{"id": "MAR", "text": "Mariage"},
{"id": "REC", "text": "Reconnaissance"},
{"id": "DEC", "text": "Décès"}
{"id": "DEC", "text": "Décès"},
]
SEXES = [
{"id": "M", "text": "Homme"},
{"id": "F", "text": "Femme"},
{"id": "NA", "text": "Autre"}
]
SEXES = [{"id": "M", "text": "Homme"}, {"id": "F", "text": "Femme"}, {"id": "NA", "text": "Autre"}]
TITLES = [
{"id": "M", "text": "Monsieur"},
{"id": "Mme", "text": "Madame"},
{"id": "Mlle", "text": "Mademoiselle"}
{"id": "Mlle", "text": "Mademoiselle"},
]
DOCUMENT_TYPES = [
{"id": "CPI", "text": "Copie intégrale"},
{"id": "EXTAF", "text": "Extrait avec filiation"},
{"id": "EXTSF", "text": "Extrait sans filiation"},
{"id": "EXTPL", "text": "Extrait plurilingue"}
{"id": "EXTPL", "text": "Extrait plurilingue"},
]
CONCERNED = [
{"id": "reconnu", "text": "Reconnu"},
{"id": "auteur", "text": "Auteur"}
]
CONCERNED = [{"id": "reconnu", "text": "Reconnu"}, {"id": "auteur", "text": "Auteur"}]
ORIGINS = [
{"id": "internet", "text": "Internet"},
{"id": "guichet", "text": "Guichet"},
{"id": "courrier", "text": "Courrier"}
{"id": "courrier", "text": "Courrier"},
]
@ -73,8 +66,8 @@ def is_clean(element):
class BaseType(object):
"""Base data binding object
"""
"""Base data binding object"""
tagname = None
def __repr__(self):
@ -82,8 +75,7 @@ class BaseType(object):
@classmethod
def make_element(cls, tagname, value=None, namespace=None, nsmap=None):
M = xobject.ElementMaker(annotate=False, namespace=namespace,
nsmap=nsmap)
M = xobject.ElementMaker(annotate=False, namespace=namespace, nsmap=nsmap)
return M(tagname, value)
@property
@ -115,19 +107,17 @@ class CityWebType(BaseType):
class SimpleType(CityWebType):
"""Data binding class for SimpleType
"""
"""Data binding class for SimpleType"""
allowed_values = None
def __init__(self, value):
if value not in self.allowed_values:
raise APIError('<%s> value (%s) not in %s' % (self.tagname, value,
self.allowed_values))
raise APIError('<%s> value (%s) not in %s' % (self.tagname, value, self.allowed_values))
self.value = value
class DateType(CityWebType):
def __init__(self, value):
try:
self.value = parse_date(value)
@ -139,8 +129,8 @@ class DateType(CityWebType):
class ComplexType(CityWebType):
"""Data binding class for ComplexType
"""
"""Data binding class for ComplexType"""
sequence = None
pattern = None
@ -229,8 +219,7 @@ class Place(ComplexType):
class Address(ComplexType):
tagname = 'adresse'
sequence = ('ligneAdr1', 'ligneAdr2', 'codePostal',
'lieu', 'mail', 'tel')
sequence = ('ligneAdr1', 'ligneAdr2', 'codePostal', 'lieu', 'mail', 'tel')
pattern = 'address_'
def __init__(self, data):
@ -273,8 +262,7 @@ class EventPlace(Place):
class Person(ComplexType):
sequence = ('noms', 'prenoms', 'genre', 'adresse', 'sexe',
'pere', 'mere', 'naissance')
sequence = ('noms', 'prenoms', 'genre', 'adresse', 'sexe', 'pere', 'mere', 'naissance')
def __init__(self, data):
super(Person, self).__init__(data)
@ -314,8 +302,7 @@ class Parent(Person):
class ConcernedCommon(Person):
sequence = ('noms', 'prenoms', 'genre', 'sexe',
'parent1', 'parent2', 'naissance')
sequence = ('noms', 'prenoms', 'genre', 'sexe', 'parent1', 'parent2', 'naissance')
def __init__(self, data):
super(ConcernedCommon, self).__init__(data)
@ -344,8 +331,7 @@ class Applicant(ComplexType):
class Event(ComplexType):
tagname = 'evenement'
sequence = ('interesse', 'conjoint', 'natureEvenement',
'typeInteresse', 'dateEvenement', 'lieuEvenement')
sequence = ('interesse', 'conjoint', 'natureEvenement', 'typeInteresse', 'dateEvenement', 'lieuEvenement')
def __init__(self, data):
certificate_type = data['certificate_type']
@ -362,8 +348,16 @@ class Event(ComplexType):
class CivilStatusApplication(ComplexType):
tagname = 'demandeEtatCivil'
sequence = (
'identifiant', 'demandeur', 'natureDocument', 'nbExemplaire',
'dateDemande', 'evenement', 'motif', 'origine', 'commentaire')
'identifiant',
'demandeur',
'natureDocument',
'nbExemplaire',
'dateDemande',
'evenement',
'motif',
'origine',
'commentaire',
)
def __init__(self, data):
self.identifiant = data['application_id']
@ -388,5 +382,5 @@ class CivilStatusApplication(ComplexType):
with atomic_write(filepath) as fd:
fd.write(force_bytes(content))
# set read only permission for owner and group
os.chmod(filepath, stat.S_IRUSR|stat.S_IRGRP)
os.chmod(filepath, stat.S_IRUSR | stat.S_IRGRP)
return filename

View File

@ -14,12 +14,35 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CityWeb',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_cityweb_users_+', related_query_name='+', blank=True)),
(
'log_level',
models.CharField(
default=b'INFO',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
),
(
'users',
models.ManyToManyField(
to='base.ApiUser', related_name='_cityweb_users_+', related_query_name='+', blank=True
),
),
],
options={
'verbose_name': "CityWeb - Demande d'acte d'\xe9tat civil",

View File

@ -24,8 +24,15 @@ from passerelle.compat import json_loads
from passerelle.utils.api import endpoint
from passerelle.utils.jsonresponse import APIError
from .cityweb import (CivilStatusApplication, TITLES, SEXES, DOCUMENT_TYPES,
CERTIFICATE_TYPES, CONCERNED, ORIGINS)
from .cityweb import (
CivilStatusApplication,
TITLES,
SEXES,
DOCUMENT_TYPES,
CERTIFICATE_TYPES,
CONCERNED,
ORIGINS,
)
class CityWeb(BaseResource):
@ -52,8 +59,7 @@ class CityWeb(BaseResource):
@property
def basepath(self):
return os.path.join(
default_storage.path('cityweb'), self.slug)
return os.path.join(default_storage.path('cityweb'), self.slug)
@endpoint(perm='can_access', description=_('Get title list'))
def titles(self, request):
@ -71,14 +77,20 @@ class CityWeb(BaseResource):
def origins(self, request):
return {'data': ORIGINS}
@endpoint(name='certificate-types', perm='can_access',
description=_('Get certificate type list'), parameters={'exclude': {'example_value': 'REC'}})
@endpoint(
name='certificate-types',
perm='can_access',
description=_('Get certificate type list'),
parameters={'exclude': {'example_value': 'REC'}},
)
def certificate_types(self, request, exclude=''):
return {'data': [item for item in CERTIFICATE_TYPES
if item.get('id') not in exclude.split(',')]}
return {'data': [item for item in CERTIFICATE_TYPES if item.get('id') not in exclude.split(',')]}
@endpoint(name='document-types', perm='can_access',
description=_('Get document type list'), parameters={'exclude': {'example_value': 'EXTPL'}})
@endpoint(
name='document-types',
perm='can_access',
description=_('Get document type list'),
parameters={'exclude': {'example_value': 'EXTPL'}},
)
def document_types(self, request, exclude=''):
return {'data': [item for item in DOCUMENT_TYPES
if item.get('id') not in exclude.split(',')]}
return {'data': [item for item in DOCUMENT_TYPES if item.get('id') not in exclude.split(',')]}

View File

@ -6,8 +6,6 @@ from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
dependencies = []
operations = [
]
operations = []

View File

@ -7,7 +7,14 @@ from django.db import migrations, models
class Migration(migrations.Migration):
replaces = [('clicrdv', '0001_initial'), ('clicrdv', '0002_clicrdv_group_id'), ('clicrdv', '0003_auto_20160920_0903'), ('clicrdv', '0004_newclicrdv'), ('clicrdv', '0005_auto_20161218_1701'), ('clicrdv', '0006_auto_20170920_0951')]
replaces = [
('clicrdv', '0001_initial'),
('clicrdv', '0002_clicrdv_group_id'),
('clicrdv', '0003_auto_20160920_0903'),
('clicrdv', '0004_newclicrdv'),
('clicrdv', '0005_auto_20161218_1701'),
('clicrdv', '0006_auto_20170920_0951'),
]
initial = True
@ -20,19 +27,59 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ClicRdv',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('log_level', models.CharField(choices=[('NOTSET', 'NOTSET'), ('DEBUG', 'DEBUG'), ('INFO', 'INFO'), ('WARNING', 'WARNING'), ('ERROR', 'ERROR'), ('CRITICAL', 'CRITICAL')], default='INFO', max_length=10, verbose_name='Log Level')),
('server', models.CharField(choices=[('www.clicrdv.com', 'Production (www.clicrdv.com)'), ('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)')], default='sandbox.clicrdv.com', max_length=64, verbose_name='Server')),
(
'log_level',
models.CharField(
choices=[
('NOTSET', 'NOTSET'),
('DEBUG', 'DEBUG'),
('INFO', 'INFO'),
('WARNING', 'WARNING'),
('ERROR', 'ERROR'),
('CRITICAL', 'CRITICAL'),
],
default='INFO',
max_length=10,
verbose_name='Log Level',
),
),
(
'server',
models.CharField(
choices=[
('www.clicrdv.com', 'Production (www.clicrdv.com)'),
('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)'),
],
default='sandbox.clicrdv.com',
max_length=64,
verbose_name='Server',
),
),
('group_id', models.IntegerField(default=0, verbose_name='Group Id')),
('apikey', models.CharField(max_length=64, verbose_name='API Key')),
('username', models.CharField(max_length=64, verbose_name='Username')),
('password', models.CharField(max_length=64, verbose_name='Password')),
('websource', models.CharField(blank=True, max_length=64, null=True, verbose_name='Web source')),
('default_comment', models.CharField(blank=True, max_length=250, null=True, verbose_name='Default comment')),
('users', models.ManyToManyField(blank=True, related_name='_clicrdv_users_+', related_query_name='+', to='base.ApiUser')),
(
'websource',
models.CharField(blank=True, max_length=64, null=True, verbose_name='Web source'),
),
(
'default_comment',
models.CharField(blank=True, max_length=250, null=True, verbose_name='Default comment'),
),
(
'users',
models.ManyToManyField(
blank=True, related_name='_clicrdv_users_+', related_query_name='+', to='base.ApiUser'
),
),
],
options={
'verbose_name': 'Clicrdv Agenda',

View File

@ -10,5 +10,4 @@ class Migration(migrations.Migration):
('clicrdv', '0001_initial'),
]
operations = [
]
operations = []

View File

@ -7,13 +7,13 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
dependencies = [
('clicrdv', '0001_squashed_0006_auto_20170920_0951'),
]
]
operations = [
operations = [
migrations.RemoveField(
model_name='clicrdv',
name='log_level',
),
]
]

View File

@ -10,5 +10,4 @@ class Migration(migrations.Migration):
('clicrdv', '0002_clicrdv_group_id'),
]
operations = [
]
operations = []

View File

@ -3,6 +3,7 @@ from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
@ -15,19 +16,56 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='NewClicRdv',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier')),
('description', models.TextField(verbose_name='Description')),
('log_level', models.CharField(default='NOTSET', max_length=10, verbose_name='Log Level', choices=[('NOTSET', 'NOTSET'), ('DEBUG', 'DEBUG'), ('INFO', 'INFO'), ('WARNING', 'WARNING'), ('ERROR', 'ERROR'), ('CRITICAL', 'CRITICAL'), ('FATAL', 'FATAL')])),
('server', models.CharField(default='sandbox.clicrdv.com', max_length=64, choices=[('www.clicrdv.com', 'Production (www.clicrdv.com)'), ('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)')])),
(
'log_level',
models.CharField(
default='NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[
('NOTSET', 'NOTSET'),
('DEBUG', 'DEBUG'),
('INFO', 'INFO'),
('WARNING', 'WARNING'),
('ERROR', 'ERROR'),
('CRITICAL', 'CRITICAL'),
('FATAL', 'FATAL'),
],
),
),
(
'server',
models.CharField(
default='sandbox.clicrdv.com',
max_length=64,
choices=[
('www.clicrdv.com', 'Production (www.clicrdv.com)'),
('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)'),
],
),
),
('group_id', models.IntegerField(default=0)),
('apikey', models.CharField(max_length=64)),
('username', models.CharField(max_length=64)),
('password', models.CharField(max_length=64)),
('websource', models.CharField(max_length=64, null=True, blank=True)),
('default_comment', models.CharField(max_length=250, null=True, blank=True)),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_newclicrdv_users_+', related_query_name='+', blank=True)),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_newclicrdv_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'verbose_name': 'Clicrdv Agenda',

View File

@ -22,15 +22,14 @@ from passerelle.utils.api import endpoint
CLICRDV_SERVERS = (
('www.clicrdv.com', 'Production (www.clicrdv.com)'),
('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)')
('sandbox.clicrdv.com', 'SandBox (sandbox.clicrdv.com)'),
)
class ClicRdv(BaseResource):
server = models.CharField(
_('Server'),
max_length=64,
choices=CLICRDV_SERVERS,
default='sandbox.clicrdv.com')
_('Server'), max_length=64, choices=CLICRDV_SERVERS, default='sandbox.clicrdv.com'
)
group_id = models.IntegerField(_('Group Id'), default=0)
apikey = models.CharField(_('API Key'), max_length=64)
username = models.CharField(_('Username'), max_length=64)
@ -120,8 +119,7 @@ class ClicRdv(BaseResource):
datetimes = []
for timeslot in self.get_available_timeslots(intervention):
parsed = datetime.datetime.strptime(timeslot, '%Y-%m-%d %H:%M:%S')
datetimed = {'id': parsed.strftime('%Y-%m-%d-%H:%M:%S'),
'text': date_format(parsed, 'j F Y H:i')}
datetimed = {'id': parsed.strftime('%Y-%m-%d-%H:%M:%S'), 'text': date_format(parsed, 'j F Y H:i')}
datetimes.append(datetimed)
datetimes.sort(key=lambda x: x.get('id'))
return datetimes
@ -130,8 +128,7 @@ class ClicRdv(BaseResource):
dates = []
for timeslot in self.get_available_timeslots(intervention):
parsed = datetime.datetime.strptime(timeslot, '%Y-%m-%d %H:%M:%S')
date = {'id': parsed.strftime('%Y-%m-%d'),
'text': date_format(parsed, 'j F Y')}
date = {'id': parsed.strftime('%Y-%m-%d'), 'text': date_format(parsed, 'j F Y')}
if date in dates:
continue
dates.append(date)
@ -142,12 +139,11 @@ class ClicRdv(BaseResource):
if not date:
raise Exception('no date value')
times = []
for timeslot in self.get_available_timeslots(intervention,
date_start='%s 00:00:00' % date,
date_end='%s 23:59:59' % date):
for timeslot in self.get_available_timeslots(
intervention, date_start='%s 00:00:00' % date, date_end='%s 23:59:59' % date
):
parsed = datetime.datetime.strptime(timeslot, '%Y-%m-%d %H:%M:%S')
timed = {'id': parsed.strftime('%H:%M:%S'),
'text': time_format(parsed, 'H:i')}
timed = {'id': parsed.strftime('%H:%M:%S'), 'text': time_format(parsed, 'H:i')}
times.append(timed)
times.sort(key=lambda x: x.get('id'))
return times
@ -158,12 +154,13 @@ class ClicRdv(BaseResource):
return response
return {'success': True}
def create_appointment(self, intervention, websource, data):
fields = data.get('fields') or {}
extra = data.get('extra') or {}
def get_data(key, default=None):
return data.get(key) or extra.get(key) or fields.get(key) or default
if intervention:
intervention = int(intervention)
else:
@ -184,17 +181,17 @@ class ClicRdv(BaseResource):
'email': get_data('clicrdv_email', ''),
'firstphone': get_data('clicrdv_firstphone', ''),
'secondphone': get_data('clicrdv_secondphone', ''),
},
'date': date,
'intervention_ids': [intervention],
'websource': websource,
},
'date': date,
'intervention_ids': [intervention],
'websource': websource,
},
}
comments = get_data('clicrdv_comments') or self.default_comment
if comments:
appointment['comments'] = comments
# optional parameters, if any...
for fieldname in (list(fields.keys()) + list(extra.keys()) + list(data.keys())):
for fieldname in list(fields.keys()) + list(extra.keys()) + list(data.keys()):
if fieldname.startswith('clicrdv_fiche_'):
appointment['appointment']['fiche'][fieldname[14:]] = get_data(fieldname) or ''
response = self.request('appointments', 'post', json=appointment)

View File

@ -7,17 +7,34 @@ from passerelle.apps.clicrdv.views import *
urlpatterns = [
url(r'^(?P<slug>[\w,-]+)/$', ClicRdvDetailView.as_view(), name='clicrdv-view'),
url(r'^(?P<slug>[\w,-]+)/interventions/(?P<intervention_id>\d+)/datetimes/$',
DateTimesView.as_view(), name='clicrdv-datetimes'),
url(r'^(?P<slug>[\w,-]+)/interventions/(?P<intervention_id>\d+)/dates/$',
DatesView.as_view(), name='clicrdv-dates'),
url(r'^(?P<slug>[\w,-]+)/interventions/(?P<intervention_id>\d+)/(?P<date>[\d-]+)/times$',
TimesView.as_view(), name='clicrdv-times'),
url(r'^(?P<slug>[\w,-]+)/interventions/(?P<intervention_id>\d+)/create$',
csrf_exempt(CreateAppointmentView.as_view()), name='clicrdv-create-appointment'),
url(r'^(?P<slug>[\w,-]+)/create$',
csrf_exempt(CreateAppointmentView.as_view()), name='clicrdv-create-appointment-qs'),
url(r'^(?P<slug>[\w,-]+)/(?P<appointment_id>\d+)/cancel$',
CancelAppointmentView.as_view(), name='clicrdv-cancel-appointment'),
url(
r'^(?P<slug>[\w,-]+)/interventions/(?P<intervention_id>\d+)/datetimes/$',
DateTimesView.as_view(),
name='clicrdv-datetimes',
),
url(
r'^(?P<slug>[\w,-]+)/interventions/(?P<intervention_id>\d+)/dates/$',
DatesView.as_view(),
name='clicrdv-dates',
),
url(
r'^(?P<slug>[\w,-]+)/interventions/(?P<intervention_id>\d+)/(?P<date>[\d-]+)/times$',
TimesView.as_view(),
name='clicrdv-times',
),
url(
r'^(?P<slug>[\w,-]+)/interventions/(?P<intervention_id>\d+)/create$',
csrf_exempt(CreateAppointmentView.as_view()),
name='clicrdv-create-appointment',
),
url(
r'^(?P<slug>[\w,-]+)/create$',
csrf_exempt(CreateAppointmentView.as_view()),
name='clicrdv-create-appointment-qs',
),
url(
r'^(?P<slug>[\w,-]+)/(?P<appointment_id>\d+)/cancel$',
CancelAppointmentView.as_view(),
name='clicrdv-cancel-appointment',
),
]

View File

@ -16,6 +16,7 @@ class DateTimesView(View, SingleObjectMixin):
input: https//passerelle/clicrdv/foobar/interventions/887/datetimes
"""
model = ClicRdv
@utils.to_json()
@ -31,6 +32,7 @@ class DatesView(View, SingleObjectMixin):
{ data: [ { id: '2014-05-07', text: "7 mai 2014" },
{ id: '2014-05-13', text: "13 mai 2014" } ], err: 0 }
"""
model = ClicRdv
@utils.to_json()
@ -46,6 +48,7 @@ class TimesView(View, SingleObjectMixin):
{ data: [ { id: '15:10:00', text: "15:10" },
{ id: '15:30:00', text: "15:30" } ], err: 0 }
"""
model = ClicRdv
@utils.to_json()
@ -77,6 +80,7 @@ class CreateAppointmentView(View, SingleObjectMixin):
output:
{ data: { 'success': true, 'appointment_id': 123 }, err: 0 }
"""
model = ClicRdv
@utils.protected_api('can_manage_appointment')
@ -85,10 +89,11 @@ class CreateAppointmentView(View, SingleObjectMixin):
if intervention_id is None:
intervention_id = self.request.GET.get('intervention')
data = json_loads(request.body)
return {'data': self.get_object().create_appointment(
intervention_id,
self.request.GET.get('websource'),
data)}
return {
'data': self.get_object().create_appointment(
intervention_id, self.request.GET.get('websource'), data
)
}
class CancelAppointmentView(View, SingleObjectMixin):
@ -98,6 +103,7 @@ class CancelAppointmentView(View, SingleObjectMixin):
output:
{ data: { 'success': true }, err: 0 }
"""
model = ClicRdv
@utils.protected_api('can_manage_appointment')

View File

@ -14,15 +14,48 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CmisConnector',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('log_level', models.CharField(default=b'INFO', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')])),
('cmis_endpoint', models.URLField(help_text='URL of the CMIS Atom endpoint', max_length=400, verbose_name='CMIS Atom endpoint')),
(
'log_level',
models.CharField(
default=b'INFO',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
),
(
'cmis_endpoint',
models.URLField(
help_text='URL of the CMIS Atom endpoint',
max_length=400,
verbose_name='CMIS Atom endpoint',
),
),
('username', models.CharField(max_length=128, verbose_name='Service username')),
('password', models.CharField(max_length=128, verbose_name='Service password')),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_cmisconnector_users_+', related_query_name='+', blank=True)),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_cmisconnector_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'verbose_name': 'CMIS connector',

View File

@ -54,7 +54,7 @@ UPLOAD_SCHEMA = {
'content': {'type': 'string'},
'content_type': {'type': 'string'},
},
'required': ['content']
'required': ['content'],
},
'filename': {
'type': 'string',
@ -65,10 +65,7 @@ UPLOAD_SCHEMA = {
'pattern': FILE_PATH_PATTERN,
},
'object_type': {'type': 'string'},
'properties': {
'type': 'object',
'additionalProperties': {'type': 'string'}
},
'properties': {'type': 'object', 'additionalProperties': {'type': 'string'}},
},
'required': ['file', 'path'],
'unflatten': True,
@ -77,8 +74,8 @@ UPLOAD_SCHEMA = {
class CmisConnector(BaseResource):
cmis_endpoint = models.URLField(
max_length=400, verbose_name=_('CMIS Atom endpoint'),
help_text=_('URL of the CMIS Atom endpoint'))
max_length=400, verbose_name=_('CMIS Atom endpoint'), help_text=_('URL of the CMIS Atom endpoint')
)
username = models.CharField(max_length=128, verbose_name=_('Service username'))
password = models.CharField(max_length=128, verbose_name=_('Service password'))
category = _('Business Process Connectors')
@ -94,7 +91,8 @@ class CmisConnector(BaseResource):
'application/json': UPLOAD_SCHEMA,
}
}
})
},
)
def uploadfile(self, request, post_data):
error, error_msg, data = self._validate_inputs(post_data)
if error:
@ -114,7 +112,7 @@ class CmisConnector(BaseResource):
return {'data': {'properties': doc.properties}}
def _validate_inputs(self, data):
""" process dict
"""process dict
return a tuple (error, error_msg, data)
"""
file_ = data['file']
@ -149,11 +147,11 @@ def wrap_cmis_error(f):
raise APIError("invalid property name: %s" % e)
except CmisException as e:
raise APIError("cmis binding error: %s" % e)
return wrapper
class CMISGateway(object):
def __init__(self, cmis_endpoint, username, password, logger):
self._cmis_client = CmisClient(cmis_endpoint, username, password)
self._logger = logger
@ -182,11 +180,13 @@ class CMISGateway(object):
return folder
@wrap_cmis_error
def create_doc(self, file_name, file_path, file_byte_content,
content_type=None, object_type=None, properties=None):
def create_doc(
self, file_name, file_path, file_byte_content, content_type=None, object_type=None, properties=None
):
folder = self._get_or_create_folder(file_path)
properties = properties or {}
if object_type:
properties['cmis:objectTypeId'] = object_type
return folder.createDocument(file_name, contentFile=BytesIO(file_byte_content),
contentType=content_type, properties=properties)
return folder.createDocument(
file_name, contentFile=BytesIO(file_byte_content), contentType=content_type, properties=properties
)

View File

@ -19,6 +19,5 @@ from django.conf.urls import include, url
from .views import CmisTypeView
management_urlpatterns = [
url(r'^(?P<connector_slug>[\w,-]+)/type/$',
CmisTypeView.as_view(), name='cmis-type'),
url(r'^(?P<connector_slug>[\w,-]+)/type/$', CmisTypeView.as_view(), name='cmis-type'),
]

View File

@ -30,8 +30,7 @@ class CmisTypeView(TemplateView):
def get(self, request, *args, **kwargs):
self.connector = CmisConnector.objects.get(slug=kwargs['connector_slug'])
client = CmisClient(self.connector.cmis_endpoint, self.connector.username,
self.connector.password)
client = CmisClient(self.connector.cmis_endpoint, self.connector.username, self.connector.password)
self.repo = client.getDefaultRepository()
type_id = request.GET.get('id')

View File

@ -20,7 +20,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CryptedFile',
fields=[
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
(
'uuid',
models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
),
('filename', models.CharField(max_length=512)),
('content_type', models.CharField(max_length=128)),
('creation_timestamp', models.DateTimeField(auto_now_add=True)),
@ -29,14 +32,44 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Cryptor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('title', models.CharField(max_length=50, verbose_name='Title')),
('slug', models.SlugField(unique=True, verbose_name='Identifier')),
('description', models.TextField(verbose_name='Description')),
('public_key', models.TextField(blank=True, validators=[passerelle.apps.cryptor.models.validate_rsa_key], verbose_name='Encryption RSA public key (PEM format)')),
('private_key', models.TextField(blank=True, validators=[passerelle.apps.cryptor.models.validate_rsa_key], verbose_name='Decryption RSA private key (PEM format)')),
('redirect_url_base', models.URLField(blank=True, help_text='Base URL for redirect, empty for local', max_length=256, verbose_name='Base URL of decryption system')),
('users', models.ManyToManyField(blank=True, related_name='_cryptor_users_+', related_query_name='+', to='base.ApiUser')),
(
'public_key',
models.TextField(
blank=True,
validators=[passerelle.apps.cryptor.models.validate_rsa_key],
verbose_name='Encryption RSA public key (PEM format)',
),
),
(
'private_key',
models.TextField(
blank=True,
validators=[passerelle.apps.cryptor.models.validate_rsa_key],
verbose_name='Decryption RSA private key (PEM format)',
),
),
(
'redirect_url_base',
models.URLField(
blank=True,
help_text='Base URL for redirect, empty for local',
max_length=256,
verbose_name='Base URL of decryption system',
),
),
(
'users',
models.ManyToManyField(
blank=True, related_name='_cryptor_users_+', related_query_name='+', to='base.ApiUser'
),
),
],
options={
'verbose_name': 'Encryption / Decryption',

View File

@ -52,15 +52,16 @@ FILE_SCHEMA = {
"filename": {"type": "string"},
"content_type": {"type": "string"},
"content": {"type": "string"},
}
},
}
}
},
}
# encrypt and decrypt are borrowed from
# https://www.pycryptodome.org/en/latest/src/examples.html#encrypt-data-with-rsa
def write_encrypt(out_file, data, key_pem):
public_key = RSA.import_key(key_pem)
session_key = get_random_bytes(16)
@ -115,15 +116,18 @@ def validate_rsa_key(key):
class Cryptor(BaseResource):
public_key = models.TextField(blank=True,
verbose_name=_('Encryption RSA public key (PEM format)'),
validators=[validate_rsa_key])
private_key = models.TextField(blank=True,
verbose_name=_('Decryption RSA private key (PEM format)'),
validators=[validate_rsa_key])
redirect_url_base = models.URLField(max_length=256, blank=True,
verbose_name=_('Base URL of decryption system'),
help_text=_('Base URL for redirect, empty for local'))
public_key = models.TextField(
blank=True, verbose_name=_('Encryption RSA public key (PEM format)'), validators=[validate_rsa_key]
)
private_key = models.TextField(
blank=True, verbose_name=_('Decryption RSA private key (PEM format)'), validators=[validate_rsa_key]
)
redirect_url_base = models.URLField(
max_length=256,
blank=True,
verbose_name=_('Base URL of decryption system'),
help_text=_('Base URL for redirect, empty for local'),
)
category = _('Misc')
@ -136,20 +140,23 @@ class Cryptor(BaseResource):
return _('this file-decrypt endpoint')
def get_filename(self, uuid, create=False):
dirname = os.path.join(default_storage.path(self.get_connector_slug()),
self.slug, uuid[0:2], uuid[2:4])
dirname = os.path.join(
default_storage.path(self.get_connector_slug()), self.slug, uuid[0:2], uuid[2:4]
)
if create:
makedir(dirname)
filename = os.path.join(dirname, uuid)
return filename
@endpoint(name='file-encrypt', perm='can_encrypt',
description=_('Encrypt a file'),
post={
'description': _('File to encrypt'),
'request_body': {'schema': {'application/json': FILE_SCHEMA}}
})
@endpoint(
name='file-encrypt',
perm='can_encrypt',
description=_('Encrypt a file'),
post={
'description': _('File to encrypt'),
'request_body': {'schema': {'application/json': FILE_SCHEMA}},
},
)
def file_encrypt(self, request, post_data):
if not self.public_key:
raise APIError('missing public key')
@ -168,8 +175,7 @@ class Cryptor(BaseResource):
if self.redirect_url_base:
redirect_url_base = self.redirect_url_base
else:
redirect_url_base = request.build_absolute_uri('%sfile-decrypt/' % (
self.get_absolute_url(),))
redirect_url_base = request.build_absolute_uri('%sfile-decrypt/' % (self.get_absolute_url(),))
redirect_url = urljoin(redirect_url_base, uuid)
content_filename = self.get_filename(uuid, create=True)
@ -189,16 +195,19 @@ class Cryptor(BaseResource):
return {'data': metadata}
@endpoint(name='file-decrypt', perm='can_decrypt',
description=_('Decrypt a file'),
pattern=r'(?P<uuid>[\w-]+)$',
example_pattern='{uuid}/',
parameters={
'uuid': {
'description': _('File identifier'),
'example_value': '12345678-abcd-4321-abcd-123456789012',
},
})
@endpoint(
name='file-decrypt',
perm='can_decrypt',
description=_('Decrypt a file'),
pattern=r'(?P<uuid>[\w-]+)$',
example_pattern='{uuid}/',
parameters={
'uuid': {
'description': _('File identifier'),
'example_value': '12345678-abcd-4321-abcd-123456789012',
},
},
)
def file_decrypt(self, request, uuid):
if not self.private_key:
raise APIError('missing private key')

View File

@ -16,12 +16,15 @@
import django.apps
class AppConfig(django.apps.AppConfig):
name = 'passerelle.apps.csvdatasource'
label = 'csvdatasource'
def get_connector_model(self):
from . import models
return models.CsvDataSource
default_app_config = 'passerelle.apps.csvdatasource.AppConfig'

View File

@ -41,25 +41,38 @@ class QueryForm(forms.ModelForm):
if named:
line = line.split(':', 1)
if len(line) != 2:
errors.append(ValidationError(
_('Syntax error line %d: each line must be prefixed '
'with an identifier followed by a colon.') % (i + 1)))
errors.append(
ValidationError(
_(
'Syntax error line %d: each line must be prefixed '
'with an identifier followed by a colon.'
)
% (i + 1)
)
)
continue
name, line = line
if not identifier_re.match(name):
errors.append(
ValidationError(_('Syntax error line %d: invalid identifier, '
'it must start with a letter and only '
'contain letters, digits and _.') % (i + 1)))
ValidationError(
_(
'Syntax error line %d: invalid identifier, '
'it must start with a letter and only '
'contain letters, digits and _.'
)
% (i + 1)
)
)
continue
try:
get_code(line)
except SyntaxError as e:
errors.append(ValidationError(
_('Syntax error line %(line)d at character %(character)d') % {
'line': i + 1,
'character': e.offset
}))
errors.append(
ValidationError(
_('Syntax error line %(line)d at character %(character)d')
% {'line': i + 1, 'character': e.offset}
)
)
if errors:
raise ValidationError(errors)
return lines

View File

@ -14,19 +14,41 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CsvDataSource',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('title', models.CharField(verbose_name='Title', max_length=50)),
('slug', models.SlugField(verbose_name='Identifier', unique=True)),
('description', models.TextField(verbose_name='Description')),
('csv_file', models.FileField(help_text='Supported file formats: csv, ods, xls, xlsx',
upload_to=b'csv', verbose_name='Spreadsheet file')),
('columns_keynames', models.CharField(default=b'id, text',
help_text='ex: id,text,data1,data2',
max_length=256,
verbose_name='Column keynames',
blank=True)),
(
'csv_file',
models.FileField(
help_text='Supported file formats: csv, ods, xls, xlsx',
upload_to=b'csv',
verbose_name='Spreadsheet file',
),
),
(
'columns_keynames',
models.CharField(
default=b'id, text',
help_text='ex: id,text,data1,data2',
max_length=256,
verbose_name='Column keynames',
blank=True,
),
),
('skip_header', models.BooleanField(default=False, verbose_name='Skip first line')),
('users', models.ManyToManyField(to='base.ApiUser', related_name='_csvdatasource_users_+', related_query_name='+', blank=True)),
(
'users',
models.ManyToManyField(
to='base.ApiUser',
related_name='_csvdatasource_users_+',
related_query_name='+',
blank=True,
),
),
],
options={
'verbose_name': 'CSV File',

View File

@ -14,7 +14,13 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='csvdatasource',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Debug Enabled', blank=True, choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Debug Enabled',
blank=True,
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
]

View File

@ -14,7 +14,12 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='csvdatasource',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[(b'DEBUG', b'DEBUG'), (b'INFO', b'INFO')],
),
preserve_default=True,
),
]

View File

@ -14,7 +14,19 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='csvdatasource',
name='log_level',
field=models.CharField(default=b'NOTSET', max_length=10, verbose_name='Log Level', choices=[(b'NOTSET', b'NOTSET'), (b'DEBUG', b'DEBUG'), (b'INFO', b'INFO'), (b'WARNING', b'WARNING'), (b'ERROR', b'ERROR'), (b'CRITICAL', b'CRITICAL')]),
field=models.CharField(
default=b'NOTSET',
max_length=10,
verbose_name='Log Level',
choices=[
(b'NOTSET', b'NOTSET'),
(b'DEBUG', b'DEBUG'),
(b'INFO', b'INFO'),
(b'WARNING', b'WARNING'),
(b'ERROR', b'ERROR'),
(b'CRITICAL', b'CRITICAL'),
],
),
preserve_default=True,
),
]

View File

@ -14,13 +14,54 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Query',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
(
'id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
('slug', models.SlugField(verbose_name='Name')),
('filters', models.TextField(help_text='List of filter clauses (Python expression)', verbose_name='Filters', blank=True)),
('projections', models.TextField(help_text='List of projections (name:expression)', verbose_name='Projections', blank=True)),
('order', models.TextField(help_text='Columns to use for sorting rows', verbose_name='Sort Order', blank=True)),
('distinct', models.TextField(help_text='Distinct columns', verbose_name='Distinct', blank=True)),
('structure', models.CharField(choices=[(b'array', 'Array'), (b'dict', 'Dictionary'), (b'keyed-distinct', 'Keyed Dictionary'), (b'tuples', 'Tuples'), (b'onerow', 'Single Row'), (b'one', 'Single Value')], default=b'dict', help_text='Data structure used for the response', max_length=20, verbose_name='Structure')),
(
'filters',
models.TextField(
help_text='List of filter clauses (Python expression)',
verbose_name='Filters',
blank=True,
),
),
(
'projections',
models.TextField(
help_text='List of projections (name:expression)',
verbose_name='Projections',
blank=True,
),
),
(
'order',
models.TextField(
help_text='Columns to use for sorting rows', verbose_name='Sort Order', blank=True
),
),
(
'distinct',
models.TextField(help_text='Distinct columns', verbose_name='Distinct', blank=True),
),
(
'structure',
models.CharField(
choices=[
(b'array', 'Array'),
(b'dict', 'Dictionary'),
(b'keyed-distinct', 'Keyed Dictionary'),
(b'tuples', 'Tuples'),
(b'onerow', 'Single Row'),
(b'one', 'Single Value'),
],
default=b'dict',
help_text='Data structure used for the response',
max_length=20,
verbose_name='Structure',
),
),
('resource', models.ForeignKey(to='csvdatasource.CsvDataSource', on_delete=models.CASCADE)),
],
options={

View File

@ -17,7 +17,10 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='TableRow',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'id',
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
('line_number', models.IntegerField()),
('data', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict)),
],
@ -28,6 +31,8 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='tablerow',
name='resource',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='csvdatasource.CsvDataSource'),
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='csvdatasource.CsvDataSource'
),
),
]

View File

@ -8,7 +8,9 @@ def generate_slug(instance):
slug = instance.slug
i = 1
while True:
queryset = instance._meta.model.objects.filter(slug=slug, resource=instance.resource).exclude(pk=instance.pk)
queryset = instance._meta.model.objects.filter(slug=slug, resource=instance.resource).exclude(
pk=instance.pk
)
if not queryset.exists():
break
slug = '%s-%s' % (instance.slug, i)

View File

@ -16,21 +16,48 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='csvdatasource',
name='columns_keynames',
field=models.CharField(blank=True, default='id, text', help_text='ex: id,text,data1,data2', max_length=256, verbose_name='Column keynames'),
field=models.CharField(
blank=True,
default='id, text',
help_text='ex: id,text,data1,data2',
max_length=256,
verbose_name='Column keynames',
),
),
migrations.AlterField(
model_name='csvdatasource',
name='csv_file',
field=models.FileField(help_text='Supported file formats: csv, ods, xls, xlsx', upload_to='csv', verbose_name='Spreadsheet file'),
field=models.FileField(
help_text='Supported file formats: csv, ods, xls, xlsx',
upload_to='csv',
verbose_name='Spreadsheet file',
),
),
migrations.AlterField(
model_name='query',
name='resource',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='queries', to='csvdatasource.CsvDataSource'),
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name='queries',
to='csvdatasource.CsvDataSource',
),
),
migrations.AlterField(
model_name='query',
name='structure',
field=models.CharField(choices=[('array', 'Array'), ('dict', 'Dictionary'), ('keyed-distinct', 'Keyed Dictionary'), ('tuples', 'Tuples'), ('onerow', 'Single Row'), ('one', 'Single Value')], default='dict', help_text='Data structure used for the response', max_length=20, verbose_name='Structure'),
field=models.CharField(
choices=[
('array', 'Array'),
('dict', 'Dictionary'),
('keyed-distinct', 'Keyed Dictionary'),
('tuples', 'Tuples'),
('onerow', 'Single Row'),
('one', 'Single Value'),
],
default='dict',
help_text='Data structure used for the response',
max_length=20,
verbose_name='Structure',
),
),
]

View File

@ -15,6 +15,10 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='csvdatasource',
name='csv_file',
field=models.FileField(help_text='Supported file formats: csv, ods, xls, xlsx', upload_to=passerelle.apps.csvdatasource.models.upload_to, verbose_name='Spreadsheet file'),
field=models.FileField(
help_text='Supported file formats: csv, ods, xls, xlsx',
upload_to=passerelle.apps.csvdatasource.models.upload_to,
verbose_name='Spreadsheet file',
),
),
]

View File

@ -52,7 +52,7 @@ code_cache = OrderedDict()
def get_code(expr):
# limit size of code cache to 1024
if len(code_cache) > 1024:
for key in list(code_cache.keys())[:len(code_cache) - 1024]:
for key in list(code_cache.keys())[: len(code_cache) - 1024]:
code_cache.pop(key)
if expr not in code_cache:
code_cache[expr] = compile(expr, '<inline>', 'eval')
@ -65,21 +65,13 @@ class Query(models.Model):
label = models.CharField(_('Label'), max_length=100)
description = models.TextField(_('Description'), blank=True)
filters = models.TextField(
_('Filters'),
blank=True,
help_text=_('List of filter clauses (Python expression)'))
order = models.TextField(
_('Sort Order'),
blank=True,
help_text=_('Columns to use for sorting rows'))
distinct = models.TextField(
_('Distinct'),
blank=True,
help_text=_('Distinct columns'))
_('Filters'), blank=True, help_text=_('List of filter clauses (Python expression)')
)
order = models.TextField(_('Sort Order'), blank=True, help_text=_('Columns to use for sorting rows'))
distinct = models.TextField(_('Distinct'), blank=True, help_text=_('Distinct columns'))
projections = models.TextField(
_('Projections'),
blank=True,
help_text=_('List of projections (name:expression)'))
_('Projections'), blank=True, help_text=_('List of projections (name:expression)')
)
structure = models.CharField(
_('Structure'),
max_length=20,
@ -89,9 +81,11 @@ class Query(models.Model):
('keyed-distinct', _('Keyed Dictionary')),
('tuples', _('Tuples')),
('onerow', _('Single Row')),
('one', _('Single Value'))],
('one', _('Single Value')),
],
default='dict',
help_text=_('Data structure used for the response'))
help_text=_('Data structure used for the response'),
)
class Meta:
ordering = ['slug']
@ -123,12 +117,10 @@ class Query(models.Model):
return self.slug
def delete_url(self):
return reverse('csv-delete-query',
kwargs={'connector_slug': self.resource.slug, 'pk': self.pk})
return reverse('csv-delete-query', kwargs={'connector_slug': self.resource.slug, 'pk': self.pk})
def edit_url(self):
return reverse('csv-edit-query',
kwargs={'connector_slug': self.resource.slug, 'pk': self.pk})
return reverse('csv-edit-query', kwargs={'connector_slug': self.resource.slug, 'pk': self.pk})
def upload_to(instance, filename):
@ -137,20 +129,23 @@ def upload_to(instance, filename):
class CsvDataSource(BaseResource):
csv_file = models.FileField(
_('Spreadsheet file'),
upload_to=upload_to,
help_text=_('Supported file formats: csv, ods, xls, xlsx'))
_('Spreadsheet file'), upload_to=upload_to, help_text=_('Supported file formats: csv, ods, xls, xlsx')
)
columns_keynames = models.CharField(
max_length=256,
verbose_name=_('Column keynames'),
default='id, text',
help_text=_('ex: id,text,data1,data2'), blank=True)
help_text=_('ex: id,text,data1,data2'),
blank=True,
)
skip_header = models.BooleanField(_('Skip first line'), default=False)
_dialect_options = JSONField(editable=False, null=True)
sheet_name = models.CharField(_('Sheet name'), blank=True, max_length=150)
category = _('Data Sources')
documentation_url = 'https://doc-publik.entrouvert.com/admin-fonctionnel/parametrage-avance/source-de-donnees-csv/'
documentation_url = (
'https://doc-publik.entrouvert.com/admin-fonctionnel/parametrage-avance/source-de-donnees-csv/'
)
class Meta:
verbose_name = _('Spreadsheet File')
@ -173,9 +168,7 @@ class CsvDataSource(BaseResource):
def _detect_dialect_options(self):
content = self.get_content_without_bom()
dialect = csv.Sniffer().sniff(content)
self.dialect_options = {
k: v for k, v in vars(dialect).items() if not k.startswith('_')
}
self.dialect_options = {k: v for k, v in vars(dialect).items() if not k.startswith('_')}
def save(self, *args, **kwargs):
cache = kwargs.pop('cache', True)
@ -193,7 +186,8 @@ class CsvDataSource(BaseResource):
TableRow.objects.filter(resource=self).delete()
for block in batch(enumerate(self.get_rows()), 5000):
TableRow.objects.bulk_create(
TableRow(resource=self, line_number=i, data=data) for i, data in block)
TableRow(resource=self, line_number=i, data=data) for i, data in block
)
def csv_file_datetime(self):
ctime = os.fstat(self.csv_file.fileno()).st_ctime
@ -205,8 +199,7 @@ class CsvDataSource(BaseResource):
@property
def dialect_options(self):
"""turn dict items into string
"""
"""turn dict items into string"""
file_type = self.csv_file.name.split('.')[-1]
if file_type in ('ods', 'xls', 'xlsx'):
return None
@ -317,8 +310,7 @@ class CsvDataSource(BaseResource):
query = Query(filters='\n'.join(filters))
return self.execute_query(request, query, query_params=params.dict())
@endpoint(perm='can_access', methods=['get'],
name='query', pattern=r'^(?P<query_name>[\w-]+)/$')
@endpoint(perm='can_access', methods=['get'], name='query', pattern=r'^(?P<query_name>[\w-]+)/$')
def select(self, request, query_name, **kwargs):
try:
query = Query.objects.get(resource=self.id, slug=query_name)
@ -371,8 +363,7 @@ class CsvDataSource(BaseResource):
filters = query.get_list('filters')
if filters:
data = [row for new_row, row in stream_expressions(filters, data, kind='filters')
if all(new_row)]
data = [row for new_row, row in stream_expressions(filters, data, kind='filters') if all(new_row)]
order = query.get_list('order')
if order:
@ -391,11 +382,13 @@ class CsvDataSource(BaseResource):
try:
hash(new_row)
except TypeError:
raise APIError(u'distinct value is unhashable',
data={
'row': repr(row),
'distinct': repr(new_row),
})
raise APIError(
u'distinct value is unhashable',
data={
'row': repr(row),
'distinct': repr(new_row),
},
)
if new_row in seen:
continue
new_data.append(row)
@ -413,24 +406,21 @@ class CsvDataSource(BaseResource):
titles.append(name)
expressions.append(expr)
new_data = []
for new_row, row in stream_expressions(expressions, data, kind='projection',
titles=titles):
for new_row, row in stream_expressions(expressions, data, kind='projection', titles=titles):
new_data.append(dict(zip(titles, new_row)))
data = new_data
if 'id' in request.GET:
# always provide a ?id= filter.
filters = ["id == %r" % force_text(request.GET['id'])]
data = [row for new_row, row in stream_expressions(filters, data, kind='filters')
if new_row[0]]
data = [row for new_row, row in stream_expressions(filters, data, kind='filters') if new_row[0]]
# allow jsonp queries by select2
# filtering is done there after projection because we need a projection named text for
# retro-compatibility with previous use of the csvdatasource with select2
if 'q' in request.GET:
filters = ["%s in normalize(text.lower())" % repr(normalize(request.GET['q'].lower()))]
data = [row for new_row, row in stream_expressions(filters, data, kind='filters')
if new_row[0]]
data = [row for new_row, row in stream_expressions(filters, data, kind='filters') if new_row[0]]
# force rendition of iterator as list
data = list(data)
@ -450,7 +440,7 @@ class CsvDataSource(BaseResource):
raise APIError('invalid offset parameter')
# paginate data
data = data[offset:offset+limit]
data = data[offset : offset + limit]
if query.structure == 'array':
return {'data': [[row[t] for t in titles] for row in data]}
@ -539,4 +529,4 @@ class TableRow(models.Model):
class Meta:
ordering = ('line_number',)
unique_together = (('resource', 'line_number'))
unique_together = ('resource', 'line_number')

View File

@ -19,12 +19,16 @@ from django.conf.urls import include, url
from .views import *
management_urlpatterns = [
url(r'^(?P<connector_slug>[\w,-]+)/download/$',
CsvDownload.as_view(), name='csv-download'),
url(r'^(?P<connector_slug>[\w,-]+)/queries/new/$',
NewQueryView.as_view(), name='csv-new-query'),
url(r'^(?P<connector_slug>[\w,-]+)/queries/(?P<pk>[\w,-]+)/$',
UpdateQueryView.as_view(), name='csv-edit-query'),
url(r'^(?P<connector_slug>[\w,-]+)/queries/(?P<pk>[\w,-]+)/delete$',
DeleteQueryView.as_view(), name='csv-delete-query'),
url(r'^(?P<connector_slug>[\w,-]+)/download/$', CsvDownload.as_view(), name='csv-download'),
url(r'^(?P<connector_slug>[\w,-]+)/queries/new/$', NewQueryView.as_view(), name='csv-new-query'),
url(
r'^(?P<connector_slug>[\w,-]+)/queries/(?P<pk>[\w,-]+)/$',
UpdateQueryView.as_view(),
name='csv-edit-query',
),
url(
r'^(?P<connector_slug>[\w,-]+)/queries/(?P<pk>[\w,-]+)/delete$',
DeleteQueryView.as_view(),
name='csv-delete-query',
),
]

View File

@ -29,9 +29,11 @@ from django.utils.translation import ugettext_lazy as _
from ..models import Invoice
def u(s):
return force_text(s, 'iso-8859-15')
class Loader(object):
def __init__(self, connector):
self.connector = connector
@ -45,6 +47,7 @@ class Loader(object):
fd = archive.open('data_full.csv')
if six.PY3:
import io
fd = io.TextIOWrapper(fd, 'iso-8859-15')
csvfile = six.StringIO(fd.read())
csvreader = csv.reader(csvfile, delimiter='\t')
@ -59,9 +62,11 @@ class Loader(object):
invoice['amount'] = str(Decimal(invoice['total_amount']) - paid_amount)
invoice['paid'] = bool(Decimal(invoice['amount']) == 0)
invoice['issue_date'] = datetime.datetime.strptime(
row['DAT_GENERATION_FAC'], '%d/%m/%Y').strftime('%Y-%m-%d')
row['DAT_GENERATION_FAC'], '%d/%m/%Y'
).strftime('%Y-%m-%d')
invoice['pay_limit_date'] = datetime.datetime.strptime(
row['DAT_LIMITEPAIE_FAC'], '%d/%m/%Y').strftime('%Y-%m-%d')
row['DAT_LIMITEPAIE_FAC'], '%d/%m/%Y'
).strftime('%Y-%m-%d')
invoice['online_payment'] = True
invoice['no_online_payment_reason'] = None
if not invoice['paid']:
@ -73,10 +78,12 @@ class Loader(object):
invoice['online_payment'] = False
invoice['no_online_payment_reason'] = 'autobilling'
obj, created = Invoice.objects.update_or_create(resource=self.connector,
external_id=row['ID_FAC'], defaults=invoice)
obj, created = Invoice.objects.update_or_create(
resource=self.connector, external_id=row['ID_FAC'], defaults=invoice
)
invoice_filename = '%s_%s.pdf' % (
datetime.datetime.strptime(row['DAT_DEBUT_PGE'], '%d/%m/%Y').strftime('%Y-%m'),
row['ID_FAC'])
datetime.datetime.strptime(row['DAT_DEBUT_PGE'], '%d/%m/%Y').strftime('%Y-%m'),
row['ID_FAC'],
)
if invoice_filename in archive_files:
obj.write_pdf(archive.read(invoice_filename))

View File

@ -52,7 +52,9 @@ def normalize_adult(adult):
def normalize_family(family, adults):
return {
'external_id': family['id_fam'],
'adults': [adults[family[id]] for id in ('id_per1', 'id_per2') if family[id] and adults.get(family[id])],
'adults': [
adults[family[id]] for id in ('id_per1', 'id_per2') if family[id] and adults.get(family[id])
],
'children': [],
'invoices': [],
'login': family['id_fam'],
@ -65,6 +67,7 @@ def normalize_family(family, adults):
'city': family['lib_commune_adr'],
}
def normalize_child(child):
sex = child['typ_sexe_per']
if sex == 'G':
@ -74,27 +77,30 @@ def normalize_child(child):
'first_name': child['lib_prenom_per'],
'last_name': child['lib_nom_per'],
'sex': sex,
'birthdate': get_date(child['dat_naissance'])
'birthdate': get_date(child['dat_naissance']),
}
def normalize_invoice(i):
invoice = {'external_id': i['id_fac'],
'label': i['id_fac'],
'total_amount': Decimal(i['mnt_facture_fac']),
'amount': Decimal(i['mnt_solde_fac']),
'issue_date': i['dat_generation_fac'],
'pay_limit_date': get_date(i['dat_limitepaie_fac']),
'autobilling': i['on_prelevauto_ins'] == 'O',
'online_payment': True,
'payment_date': get_datetime(i['dat_reglement']),
'litigation_date': get_date(i['dat_perception_fac']),
'paid': Decimal(i['mnt_solde_fac']) == 0
invoice = {
'external_id': i['id_fac'],
'label': i['id_fac'],
'total_amount': Decimal(i['mnt_facture_fac']),
'amount': Decimal(i['mnt_solde_fac']),
'issue_date': i['dat_generation_fac'],
'pay_limit_date': get_date(i['dat_limitepaie_fac']),
'autobilling': i['on_prelevauto_ins'] == 'O',
'online_payment': True,
'payment_date': get_datetime(i['dat_reglement']),
'litigation_date': get_date(i['dat_perception_fac']),
'paid': Decimal(i['mnt_solde_fac']) == 0,
}
return invoice
class Dialect(csv.Dialect):
'''Because sometimes it cannot be sniffed by csv.Sniffer'''
delimiter = ';'
doublequote = False
escapechar = None
@ -104,13 +110,16 @@ class Dialect(csv.Dialect):
class Loader(object):
def __init__(self, connector):
self.connector = connector
def clean(self, archive):
for filename in ('extract_prcit_personne.csv', 'extract_prcit_famille.csv',
'extract_prcit_enfant.csv', 'extract_prcit_facture.csv'):
for filename in (
'extract_prcit_personne.csv',
'extract_prcit_famille.csv',
'extract_prcit_enfant.csv',
'extract_prcit_facture.csv',
):
if not filename in archive.namelist():
raise ValidationError(_('Missing %(filename)s file in zip.') % {'filename': filename})
@ -118,6 +127,7 @@ class Loader(object):
fd = self.archive.open(filename)
if six.PY3:
import io
fd = io.TextIOWrapper(fd, 'iso-8859-15')
reader = csv.reader(fd, Dialect)
@ -144,7 +154,6 @@ class Loader(object):
families[invoice['id_fam']]['invoices'].append(normalize_invoice(invoice))
return families
def load(self, archive):
self.archive = archive
@ -157,20 +166,32 @@ class Loader(object):
import_start_timestamp = timezone.now()
try:
for family_data in families.values():
data = dict_cherry_pick(family_data,
('login', 'password', 'family_quotient',
'zipcode', 'street_number', 'street_name',
'address_complement', 'city'))
family, created = Family.objects.update_or_create(external_id=family_data['external_id'],
resource=self.connector, defaults=data)
data = dict_cherry_pick(
family_data,
(
'login',
'password',
'family_quotient',
'zipcode',
'street_number',
'street_name',
'address_complement',
'city',
),
)
family, created = Family.objects.update_or_create(
external_id=family_data['external_id'], resource=self.connector, defaults=data
)
for adult_data in family_data.get('adults') or []:
Adult.objects.update_or_create(family=family,
external_id=adult_data['external_id'], defaults=adult_data)
Adult.objects.update_or_create(
family=family, external_id=adult_data['external_id'], defaults=adult_data
)
for child_data in family_data.get('children') or []:
Child.objects.get_or_create(family=family,
external_id=child_data['external_id'], defaults=child_data)
Child.objects.get_or_create(
family=family, external_id=child_data['external_id'], defaults=child_data
)
for invoice_data in family_data.get('invoices') or []:
storage = DefaultStorage()
@ -179,17 +200,27 @@ class Loader(object):
invoice_path = os.path.join(invoices_dir, invoice_filename)
# create invoice object only if associated pdf exists
if os.path.exists(invoice_path):
invoice, created = Invoice.objects.update_or_create(resource=self.connector,
family=family, external_id=invoice_data['external_id'], defaults=invoice_data)
invoice, created = Invoice.objects.update_or_create(
resource=self.connector,
family=family,
external_id=invoice_data['external_id'],
defaults=invoice_data,
)
except Exception as e:
self.connector.logger.error('Error occurred while importing data: %s', e)
Family.objects.filter(resource=self.connector, update_timestamp__lte=import_start_timestamp).delete()
Adult.objects.filter(family__resource=self.connector, update_timestamp__lte=import_start_timestamp).delete()
Child.objects.filter(family__resource=self.connector, update_timestamp__lte=import_start_timestamp).delete()
Adult.objects.filter(
family__resource=self.connector, update_timestamp__lte=import_start_timestamp
).delete()
Child.objects.filter(
family__resource=self.connector, update_timestamp__lte=import_start_timestamp
).delete()
# remove obsolete invoices and their pdfs
for invoice in Invoice.objects.filter(resource=self.connector, update_timestamp__lte=import_start_timestamp):
for invoice in Invoice.objects.filter(
resource=self.connector, update_timestamp__lte=import_start_timestamp
):
if invoice.has_pdf:
os.unlink(invoice.pdf_filename())
invoice.delete()

Some files were not shown because too many files have changed in this diff