# -*- coding: utf-8 -*-
|
|
|
|
"""Testing utilities functions."""
|
|
|
|
import os
|
|
import base64
|
|
import re
|
|
import datetime
|
|
|
|
import pytest
|
|
|
|
from django.core.files import File
|
|
from django.core.files.base import ContentFile
|
|
|
|
from atreal_openads.utils import (
|
|
to_dash_case,
|
|
force_encoded_string_output,
|
|
strip_tags,
|
|
clean_spaces,
|
|
normalize,
|
|
get_file_data,
|
|
get_file_digest,
|
|
get_upload_path,
|
|
get_file_extension,
|
|
trunc_str_values,
|
|
DictDumper
|
|
)
|
|
|
|
from atreal_openads.models import (
|
|
ForwardFile,
|
|
Guichet,
|
|
Collectivite,
|
|
AtrealOpenads
|
|
)
|
|
|
|
|
|
# Connector identification used throughout the tests.
CONNECTOR_NAME = 'atreal-openads'
CONNECTOR_SLUG = 'atreal'
# openADS identifier of the default 'collectivite'.
COLLECTIVITE = 79
# Credentials and endpoint for the (fake) openADS API; the password is
# random because the tests never authenticate against a real service.
OPENADS_API_LOGIN = 'publik-passerelle'
OPENADS_API_PASSWORD = base64.urlsafe_b64encode(os.urandom(20))
OPENADS_API_URL = 'http://openads.api/'

# Random opaque values standing in for a CSRF cookie and a 'dossier' number.
FAKE_COOKIE_CRSF = base64.urlsafe_b64encode(os.urandom(20))
FAKE_NUMERO_DOSSIER = base64.urlsafe_b64encode(os.urandom(10))

# Paths to the sample PDF documents shipped with the test suite.
TESTS_DIR = os.path.dirname(__file__)
RESOURCES_DIR = os.path.join(TESTS_DIR, 'resources')
TEST_FILE_CERFA_DIA = os.path.join(RESOURCES_DIR, 'cerfa_10072-02.pdf')
TEST_FILE_PLAN_CADASTRAL = os.path.join(RESOURCES_DIR, 'plancadastral.pdf')
|
|
|
|
|
|
@pytest.fixture
# pylint: disable=unused-argument,invalid-name
def atreal_openads(db):
    """Return an instance of a connector AtrealOpenads."""
    # Build the keyword arguments first: the connector is entirely defined
    # by the module-level test constants.
    connector_fields = {
        'slug': CONNECTOR_SLUG,
        'default_collectivite_openADS_id': COLLECTIVITE,
        'openADS_API_url': OPENADS_API_URL,
        'basic_auth_username': OPENADS_API_LOGIN,
        'basic_auth_password': OPENADS_API_PASSWORD,
    }
    return AtrealOpenads.objects.create(**connector_fields)
|
|
|
|
@pytest.fixture
# pylint: disable=unused-argument,redefined-outer-name,invalid-name
def collectivite_1(db, atreal_openads):
    """Return an instance of a 'Collectivite'."""
    # A single 'collectivite' attached to the connector fixture above.
    fields = {
        'name': u'Macollectivité',
        'connecteur': atreal_openads,
        'openADS_id': '3',
    }
    return Collectivite.objects.create(**fields)  # pylint: disable=no-member
|
|
|
|
@pytest.fixture
# pylint: disable=unused-argument,redefined-outer-name,invalid-name
def collectivite_1_guichet(db, atreal_openads, collectivite_1):
    """Return an instance of a 'Guichet'."""
    # Opening hours: daily 9h-17h, weekly from Monday 8h30 to Friday 12h15.
    fields = {
        'collectivite': collectivite_1,
        'ouverture_jour_h': datetime.time(9, 0),
        'fermeture_jour_h': datetime.time(17, 0),
        'ouverture_sem_d': 1,  # Lundi
        'fermeture_sem_d': 5,  # Vendredi
        'ouverture_sem_h': datetime.time(8, 30),
        'fermeture_sem_h': datetime.time(12, 15),
    }
    return Guichet.objects.create(**fields)  # pylint: disable=no-member
|
|
|
|
|
|
def test_to_dash_case():
    """Test for function 'to_dash_case()'."""
    # A camel-case identifier becomes dash-separated lower case.
    assert to_dash_case('ACamelCaseName') == 'a-camel-case-name'
    # The empty string is returned unchanged.
    assert to_dash_case('') == ''
|
|
|
|
|
|
def test_force_encoded_string_output():  # pylint: disable=invalid-name
    """Test for function 'force_encoded_string_output()'."""

    def str_producer():
        """Return a hardcoded byte string 'toto'."""
        return str('toto')

    def unicode_producer():
        """Return a hardcoded unicode string u'toto'."""
        return u'toto'

    # Whether the wrapped callable returns a byte string or a unicode
    # string, and whatever encoding is requested, the decorated call must
    # always produce a 'str' object.
    for producer in (str_producer, unicode_producer):
        assert isinstance(force_encoded_string_output(producer)(), str)
        assert isinstance(force_encoded_string_output(producer, 'latin1')(), str)
|
|
|
|
|
|
def test_strip_tags():
    """Test for function 'strip_tags()'."""

    base_string = 'aaa b cc '

    # A string without any markup is returned untouched.
    assert strip_tags(base_string) == base_string

    # An unclosed tag is stripped, its text content is kept.
    assert strip_tags(base_string + '<em>dd') == base_string + 'dd'

    # A properly closed tag is stripped as well.
    # NOTE(review): the original repeated this exact assertion twice;
    # the duplicate has been removed.
    assert strip_tags(base_string + '<em>dd</em>') == base_string + 'dd'

    # A lone '<' that does not start a tag must be preserved.
    assert strip_tags(base_string + ' 1 < 3') == base_string + ' 1 < 3'
|
|
|
|
|
|
def test_clean_spaces():
    """Test for function 'clean_spaces()'."""
    # Each pair is (raw input, expected cleaned output): trailing blanks
    # are stripped, and tabs/newlines/carriage-returns (including their
    # backslash-escaped textual forms) become single spaces.
    cases = [
        ('aaa b cc ', 'aaa b cc'),
        ('a\ta b\nb c\rc d\\n\\r\\td', 'a a b b c c d d'),
    ]
    for raw, expected in cases:
        assert clean_spaces(raw) == expected
|
|
|
|
|
|
def test_normalize():
    """Test for function 'normalize()'."""
    # None is normalized to the empty string.
    assert normalize(None) == ''
    # For real strings the behaviour matches clean_spaces(): trailing
    # blanks stripped, whitespace (and escaped whitespace) collapsed.
    for raw, expected in [
            ('aaa b cc ', 'aaa b cc'),
            ('a\ta b\nb c\rc d\\n\\r\\td', 'a a b b c c d d'),
    ]:
        assert normalize(raw) == expected
|
|
|
|
|
|
def test_get_file_data():
    """Test for function 'get_file_data()'."""

    # Read the reference file once, in binary mode (it is a PDF), and close
    # the handle properly — the original leaked two unclosed handles and
    # read the binary file in text mode.
    with open(TEST_FILE_CERFA_DIA, 'rb') as file_pt:
        content = file_pt.read()

    # By default the file data is returned base64-encoded.
    assert get_file_data(TEST_FILE_CERFA_DIA) == base64.b64encode(content)
    # With b64=False the raw content is returned.
    assert get_file_data(TEST_FILE_CERFA_DIA, b64=False) == content
|
|
|
|
|
|
def test_get_file_digest():
    """Test for function 'get_file_digest()'."""

    # Open the PDF in binary mode: the digest is computed on raw bytes and
    # text mode could corrupt the content on some platforms.
    with open(TEST_FILE_CERFA_DIA, 'rb') as file_pt:
        assert get_file_digest(file_pt) == ('cc90a620982760fdee16a5b4fe1b5ac3'
                                            'b4fe868fd02d2f70b27f1e46d283ea51')
|
|
|
|
|
|
def test_get_upload_path():
    """Test for function 'get_upload_path()'."""

    # Open the sample PDF in binary mode and make sure the handle is closed
    # when the test ends — the original leaked it via File(open(...)).
    with open(TEST_FILE_CERFA_DIA, 'rb') as file_pt:
        forwardfile = ForwardFile(
            numero_demande='45641531',
            numero_dossier=FAKE_NUMERO_DOSSIER,
            type_fichier='CERFA',
            orig_filename=os.path.basename(TEST_FILE_CERFA_DIA),
            content_type='application/pdf',
            file_hash='ffdf456fdsvgb4bgfb6g4f5b',
            upload_file=File(file_pt),
            upload_status='pending',
            connecteur=None,
            collectivite=None
        )
        # The upload path embeds a timestamp and the beginning of the file
        # hash, and keeps the original '.pdf' extension.
        regex = r"^to_openADS__%s__%s\.pdf$" % (
            '[0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}h[0-9]{2}m[0-9]{2}s[0-9]+', 'ffdf')
        assert re.search(regex, get_upload_path(forwardfile))
|
|
|
|
|
|
def test_get_file_extension():
    """Test for function 'get_file_extension()'."""
    # (args, expected): the extension comes from the filename when present,
    # is guessed from the content type otherwise, and defaults to ''.
    for args, expected in [
            (('afile.pdf',), '.pdf'),
            (('afile', 'application/pdf'), '.pdf'),
            (('',), ''),
            (('afile',), ''),
    ]:
        assert get_file_extension(*args) == expected
|
|
|
|
|
|
def test_trunc_str_values():
    """Test for function 'trunc_str_values()'."""

    # An empty dict is returned as-is.
    assert trunc_str_values({}, 10) == {}

    # Flat dict: values longer than the limit are cut and suffixed with an
    # ellipsis; a fresh dict is built for every call, like the original
    # test did, in case the function mutates its argument.
    for limit, expected in [
            (0, {'a': u'…'}),
            (1, {'a': u'1…'}),
            (2, {'a': u'12…'}),
            (5, {'a': u'12345…'}),
            (8, {'a': u'12345678…'}),
            (9, {'a': u'123456789'}),
            (10, {'a': '123456789'}),
    ]:
        assert trunc_str_values({'a': '123456789'}, limit) == expected

    # Keys are never truncated, only values.
    assert trunc_str_values({'a': '123456789', 'b123456789': '987654321'}, 5) \
        == {'a': u'12345…', 'b123456789': u'98765…'}

    # Truncation recurses into nested dicts ...
    assert trunc_str_values(
        {'a': '123456789', 'b123456789': '987654321',
         'c': {'c1': 'ABCDEFGHIJK'}},
        5) == {'a': u'12345…', 'b123456789': u'98765…',
               'c': {'c1': u'ABCDE…'}}

    # ... into lists ...
    assert trunc_str_values(
        {'a': '123456789', 'b123456789': '987654321',
         'c': {'c1': 'ABCDEFGHIJK'}, 'd': ['123456789']},
        5) == {'a': u'12345…', 'b123456789': u'98765…',
               'c': {'c1': u'ABCDE…'}, 'd': [u'12345…']}

    # ... and into dicts nested inside lists.
    assert trunc_str_values(
        {'a': '123456789', 'b123456789': '987654321',
         'c': {'c1': 'ABCDEFGHIJK'},
         'd': ['123456789', {'eeeeeeeeee': '132456789'}]},
        5) == {'a': u'12345…', 'b123456789': u'98765…',
               'c': {'c1': u'ABCDE…'},
               'd': [u'12345…', {'eeeeeeeeee': u'13245…'}]}
|
|
|
|
|
|
def test_dict_dumper():
    """Test for methods of class 'DictDumper'."""

    data = {}

    dumper = DictDumper(data, use_json_dumps=False)
    # repr() must be an utf-8 encoded byte string describing the instance.
    expected_repr = u'DictDumper(dic=%r,max_str_len=%r,use_json_dumps=%r)' % (
        data, dumper.max_str_len, dumper.use_json_dumps)
    assert repr(dumper) == expected_repr.encode('utf-8')
    assert str(dumper) == '{}'
    assert unicode(dumper) == u'{}'

    # The wrapped dict is kept as-is and an empty dict renders like the
    # plain dict whatever the length limit.
    assert data == dumper.dic
    assert unicode(data) == unicode(dumper)
    dumper = DictDumper(data, 0, use_json_dumps=False)
    assert data == dumper.dic
    assert unicode(data) == unicode(dumper)

    data = {'a': '123456789'}
    # A limit larger than the value leaves the rendering unchanged.
    dumper = DictDumper(data, 10, use_json_dumps=False)
    assert data == dumper.dic
    assert unicode(data) == unicode(dumper)
    # A smaller limit truncates values in the rendering only, never in the
    # underlying dict.
    dumper = DictDumper(data, 5, use_json_dumps=False)
    assert data == dumper.dic
    assert unicode(dumper) == unicode({'a': u'12345…'})
    # With JSON dumping the ellipsis shows up as an escaped code point.
    dumper = DictDumper(data, 5, use_json_dumps=True)
    assert data == dumper.dic
    assert unicode(dumper) == u'{"a": "12345\\u2026"}'
|
|
|
|
|
|
# pylint: disable=unused-argument,redefined-outer-name
def test_base_model(atreal_openads, collectivite_1, collectivite_1_guichet):
    """Test for methods of class 'BaseModel' through instance of a ForwardFile."""

    fwd_file = ForwardFile(
        numero_demande='45641531',
        numero_dossier=FAKE_NUMERO_DOSSIER,
        type_fichier='CERFA',
        orig_filename=os.path.basename(TEST_FILE_CERFA_DIA),
        content_type='application/pdf',
        file_hash='ffdf456fdsvgb4bgfb6g4f5b',
        upload_file=ContentFile('toto'),
        upload_status='pending',
        connecteur=atreal_openads,
        collectivite=None
    )

    # Human readable names.
    assert fwd_file.get_verbose_name() == 'Forward File'
    assert fwd_file.get_verbose_name_plural() == 'Forward Files'

    # Raw class names.
    assert fwd_file.get_class_name() == 'ForwardFile'
    assert fwd_file.get_class_name_plural() == 'ForwardFiles'

    # Dash-case variants (used to build URLs).
    assert fwd_file.get_class_name_dash_case() == 'forward-file'
    assert fwd_file.get_class_name_plural_dash_case() == 'forward-files'

    # Title-case variants.
    assert fwd_file.get_class_name_title() == 'Forward File'
    assert fwd_file.get_class_name_plural_title() == 'Forward Files'

    assert fwd_file.get_url_name('list', plural=True) == 'list-forward-files'

    # URLs of an unsaved instance end with 'None' (no primary key yet).
    assert fwd_file.get_absolute_url() == '/manage/atreal-openads/atreal/forward-file/None'
    assert fwd_file.get_edit_url() == '/manage/atreal-openads/atreal/edit-forward-file/None'
    assert fwd_file.get_delete_url() == '/manage/atreal-openads/atreal/delete-forward-file/None'
    assert fwd_file.get_list_url() == '/manage/atreal-openads/atreal/forward-files'

    # The connector itself also implements the BaseModel API.
    assert atreal_openads.get_class_name_plural() == 'AtrealOpenads'
    assert atreal_openads.get_url_name('view') == 'view-connector'

    params = atreal_openads.get_url_params(True)
    assert params['connector'] == 'atreal-openads'
    assert params['slug'] == atreal_openads.slug

    # A connector has no list view: the method must raise.
    with pytest.raises(Exception) as excinfo:
        atreal_openads.get_list_url()
    assert unicode(excinfo.value) == u"AtrealOpenads:get_list_url() method should not be called"

    # TODO add more collectivite test cases

    # Same restriction for a 'Guichet' instance.
    with pytest.raises(Exception) as excinfo:
        collectivite_1_guichet.get_list_url()
    assert unicode(excinfo.value) == u"Guichet:get_list_url() method should not be called"
|