536 lines
18 KiB
Python
536 lines
18 KiB
Python
import datetime
|
|
import json
|
|
import os
|
|
import re
|
|
import time
|
|
from unittest import mock
|
|
|
|
import pytest
|
|
from django.core.cache import cache
|
|
from django.utils import translation
|
|
from quixote import cleanup
|
|
|
|
import wcs.api # workaround against circular dependencies :/
|
|
import wcs.qommon.storage
|
|
from wcs.admin.settings import FileTypesDirectory
|
|
from wcs.fields import StringField
|
|
from wcs.qommon import evalutils, force_str
|
|
from wcs.qommon.backoffice.listing import pagination_links
|
|
from wcs.qommon.form import FileSizeWidget
|
|
from wcs.qommon.http_request import HTTPRequest
|
|
from wcs.qommon.humantime import humanduration2seconds, seconds2humanduration
|
|
from wcs.qommon.misc import (
|
|
date_format,
|
|
ellipsize,
|
|
format_time,
|
|
get_as_datetime,
|
|
json_loads,
|
|
normalize_geolocation,
|
|
parse_isotime,
|
|
simplify,
|
|
)
|
|
from wcs.scripts import Script
|
|
from wcs.wf.jump import JumpWorkflowStatusItem
|
|
from wcs.workflows import Workflow
|
|
|
|
from .utilities import clean_temporary_pub, create_temporary_pub, get_app
|
|
|
|
|
|
def setup_module(module):
    # Reset quixote's global publisher/session state before any test in
    # this module runs, so earlier test modules cannot leak state in.
    cleanup()
|
|
|
|
|
|
def teardown_module(module):
    # Remove the temporary publication directory created by
    # create_temporary_pub() during this module's tests.
    clean_temporary_pub()
|
|
|
|
|
|
def test_parse_file_size():
    """Valid size strings parse to the expected byte counts.

    Decimal suffixes (K, M, optionally followed by o/B) use powers of ten,
    binary suffixes (Kio, Mio) use powers of two; surrounding whitespace
    and a missing space before the suffix are both tolerated.
    """
    cases = [
        ('17', 17),
        ('17o', 17),
        ('17 K', 17 * 10 ** 3),
        ('17 M', 17 * 10 ** 6),
        ('17 Mo', 17 * 10 ** 6),
        ('17 MB', 17 * 10 ** 6),
        ('17 Kio', 17 * 2 ** 10),
        ('17 Mio', 17 * 2 ** 20),
        ('17K', 17 * 10 ** 3),
        ('17 K', 17 * 10 ** 3),
        (' 17 K ', 17 * 10 ** 3),
    ]
    for value, expected in cases:
        assert FileSizeWidget.parse_file_size(value) == expected
|
|
|
|
|
|
def test_parse_invalid_file_size():
    """Malformed size strings must raise ValueError."""
    invalid_values = ('17i', 'hello', '0.4K', '2G')
    for invalid in invalid_values:
        with pytest.raises(ValueError):
            FileSizeWidget.parse_file_size(invalid)
|
|
|
|
|
|
@pytest.mark.parametrize(
    'seconds, expected',
    [
        (1, '1 second'),
        (3, '3 seconds'),
        (100000, '1 day, 3 hours, 46 minutes and 40 seconds'),
        (13, '13 seconds'),
        (60, '1 minute'),
        (3600, '1 hour'),
    ],
)
def test_humantime(seconds, expected):
    """seconds -> human duration rendering, and the round-trip back."""
    pub = create_temporary_pub()
    # rendering needs a plural-aware translation function on the publisher
    pub.ngettext = translation.ngettext
    human = seconds2humanduration(seconds)
    assert human == expected
    assert humanduration2seconds(human) == seconds
|
|
|
|
|
|
def test_parse_mimetypes():
    """Raw MIME types and dotted extensions both parse, comma-separated."""
    parse = FileTypesDirectory.parse_mimetypes
    assert parse('application/pdf') == ['application/pdf']
    assert parse('.pdf') == ['application/pdf']
    expected = {'application/pdf', 'application/vnd.oasis.opendocument.text'}
    assert set(parse('.pdf, .odt')) == expected
|
|
|
|
|
|
def test_format_mimetypes():
    """MIME types render with a guessed extension and get truncated with '...'."""
    fmt = FileTypesDirectory.format_mimetypes
    assert fmt(['application/pdf']) == 'application/pdf (.pdf)'
    assert fmt(['application/pdf', 'text/rtf']) == 'application/pdf (.pdf), text/rtf'
    # the extension guessed for application/msword depends on the platform's
    # mimetypes database, so accept any of the known candidates
    assert fmt(['application/pdf', 'application/msword']) in (
        'application/pdf (.pdf), application/msword (.doc)',
        'application/pdf (.pdf), application/msword (.dot)',
        'application/pdf (.pdf), application/msword (.wiz)',
    )
    long_list = [
        'application/pdf',
        'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
        'application/msword',
    ]
    # long lists are cut short and suffixed with an ellipsis
    assert fmt(long_list) == (
        'application/pdf (.pdf), '
        'application/vnd.openxmlformats-officedocument.wordprocessingml.document (.docx)'
        '...'
    )
|
|
|
|
|
|
def test_simplify_unchanged():
    """Strings that are already simple come back untouched."""
    for text in ('test', 'another-test'):
        assert simplify(text) == text
    # with an explicit separator, matching separators are preserved too
    assert simplify('another_test', '_') == 'another_test'
|
|
|
|
|
|
def test_simplify_space():
    """Inner spaces become the separator; leading/trailing spaces are stripped."""
    for text in ('test again', ' test again '):
        assert simplify(text) == 'test-again'
        assert simplify(text, '_') == 'test_again'
|
|
|
|
|
|
def test_simplify_apostrophes():
    """Runs of apostrophes collapse into a single separator."""
    assert simplify("test'again") == 'test-again'
    assert simplify("test'''again") == 'test-again'
|
|
|
|
|
|
def test_simplify_accented():
    """Accented characters are transliterated to plain ASCII."""
    result = simplify('cliché')
    assert result == 'cliche'
|
|
|
|
|
|
def test_simplify_remove():
    """Punctuation is dropped and the gaps collapse to one separator."""
    cases = {
        'this is: (a) "test"': 'this-is-a-test',
        'a test; again?': 'a-test-again',
    }
    for text, expected in cases.items():
        assert simplify(text) == expected
|
|
|
|
|
|
def test_simplify_mix():
    """Accents, punctuation and surrounding spaces are all handled together."""
    cases = {
        ' this is: (a) "cliché" ': 'this-is-a-cliche',
        ' À "cliché"; again? ': 'a-cliche-again',
    }
    for text, expected in cases.items():
        assert simplify(text) == expected
|
|
|
|
|
|
def test_json_str_decoder():
    """json_loads must yield native str keys and values (never bytes)."""
    json_str = json.dumps({'lst': [{'a': 'b'}, 1, 2], 'bla': 'éléphant'})
    parsed = json_loads(json_str)
    first_key = next(iter(parsed.keys()))
    assert isinstance(first_key, str)
    assert isinstance(parsed['lst'][0]['a'], str)
    assert isinstance(parsed['bla'], str)
    # non-ASCII content survives the round trip
    assert parsed['bla'] == force_str('éléphant')
|
|
|
|
|
|
def test_format_time():
    """format_time accepts None, epoch ints, (y, m[, d]) tuples and struct_time."""
    # None renders as the unknown-value placeholder
    assert format_time(None, '%(month_name)s') == '?'
    # epoch timestamps: local time by default, UTC when gmtime=True
    assert format_time(1500000000, '%(month_name)s') == 'July'
    assert format_time(1500000000, '%(month_name)s', gmtime=True) == 'July'
    assert format_time(1500000000, '%(hour)s') == '4'
    assert format_time(1500000000, '%(hour)s', gmtime=True) == '2'
    # short (year, month) and (year, month, day) tuples are accepted
    assert format_time((2016, 8), '%(month)s') == '8'
    assert format_time((2016, 8, 2), '%(month)s') == '8'
    # struct_time input
    moment = time.localtime(1500000000)
    assert format_time(moment, '%(month)s') == '7'
    assert format_time(moment, '%(weekday_name)s') == 'Friday'
|
|
|
|
|
|
def test_parse_isotime():
    """parse_isotime accepts UTC-suffixed ISO datetimes and rejects the rest."""
    expected = 1420107019
    assert parse_isotime('2015-01-01T10:10:19Z') == expected
    assert parse_isotime('2015-01-01T10:10:19+00:00Z') == expected
    # missing 'Z' suffix or missing 'T' separator must raise
    for bad_value in ('2015-01-01T10:10:19', '2015-01-0110:10:19Z'):
        with pytest.raises(ValueError):
            parse_isotime(bad_value)
|
|
|
|
|
|
def test_script_substitution_variable():
    # Scripts dropped in a scripts/ directory are exposed as methods of the
    # 'script' substitution variable; the instance directory (pub.app_dir)
    # takes precedence over the global one (pub.APP_DIR).
    pub = create_temporary_pub()
    pub.substitutions.feed(pub)
    variables = pub.substitutions.get_context_variables()
    # no scripts directory yet: unknown script names raise AttributeError
    with pytest.raises(AttributeError):
        assert variables['script'].hello_world()

    os.mkdir(os.path.join(pub.app_dir, 'scripts'))
    with open(os.path.join(pub.app_dir, 'scripts', 'hello_world.py'), 'w') as fd:
        fd.write('"""docstring"""\nresult = "hello world"')
    # the script's `result` variable becomes the call's return value
    assert variables['script'].hello_world() == 'hello world'

    # the script module's docstring is exposed on the Script object
    assert Script('hello_world').__doc__ == 'docstring'

    # add a same-named global script: the instance script still wins
    os.mkdir(os.path.join(pub.APP_DIR, 'scripts'))
    with open(os.path.join(pub.APP_DIR, 'scripts', 'hello_world.py'), 'w') as fd:
        fd.write('result = "hello global world"')
    assert variables['script'].hello_world() == 'hello world'

    # remove the instance script: the global one is now picked up
    os.unlink(os.path.join(pub.app_dir, 'scripts', 'hello_world.py'))
    assert variables['script'].hello_world() == 'hello global world'

    # scripts execute with the substitution context as globals (site_url here)
    with open(os.path.join(pub.app_dir, 'scripts', 'hello_world.py'), 'w') as fd:
        fd.write('result = site_url')
    assert variables['script'].hello_world() == 'http://example.net'
|
|
|
|
|
|
def test_default_charset():
    """Responses must advertise utf-8 in their Content-Type header."""
    pub = create_temporary_pub()
    response = get_app(pub).get('/')
    content_type = response.headers['Content-Type']
    assert 'utf-8' in content_type
|
|
|
|
|
|
def test_age_in_years():
    """age_in_years accepts str, date and struct_time and honours leap days."""
    create_temporary_pub()
    # the three accepted input types for the start date
    assert evalutils.age_in_years('2000-01-01', '2016-05-26') == 16
    assert evalutils.age_in_years(datetime.date(2000, 1, 1), '2016-05-26') == 16
    assert evalutils.age_in_years(time.struct_time((2000, 1, 1, 0, 0, 0, 0, 0, 0)), '2016-05-26') == 16
    # birthday-not-yet-reached and Feb 29 edge cases
    for birth, today, expected in (
        ('2000-06-01', '2016-05-26', 15),
        ('2000-02-29', '2016-02-29', 16),
        ('2000-02-28', '2016-02-29', 16),
        ('2000-03-01', '2016-02-29', 15),
    ):
        assert evalutils.age_in_years(birth, today) == expected
    # without an explicit reference date, today is used
    assert evalutils.age_in_years('2000-01-01') >= 16
|
|
|
|
|
|
def test_age_in_years_and_months():
    """Like age_in_years, but returning a (years, months) tuple."""
    create_temporary_pub()
    # str/date/struct_time are all accepted, on either side
    assert evalutils.age_in_years_and_months('2000-01-01', '2016-05-26') == (16, 4)
    assert evalutils.age_in_years_and_months('2000-01-01', datetime.date(2016, 5, 26)) == (16, 4)
    assert evalutils.age_in_years_and_months(datetime.date(2000, 1, 1), '2016-05-26') == (16, 4)
    birth_struct = time.struct_time((2000, 1, 1, 0, 0, 0, 0, 0, 0))
    assert evalutils.age_in_years_and_months(birth_struct, '2016-05-26') == (16, 4)
    # birthday-not-yet-reached and Feb 29 edge cases
    for birth, today, expected in (
        ('2000-06-01', '2016-05-26', (15, 11)),
        ('2000-02-29', '2016-02-29', (16, 0)),
        ('2000-02-28', '2016-02-29', (16, 0)),
        ('2000-03-01', '2016-02-29', (15, 11)),
    ):
        assert evalutils.age_in_years_and_months(birth, today) == expected
    # without an explicit reference date, today is used
    assert evalutils.age_in_years_and_months('2000-01-01') >= (16, 0)
|
|
|
|
|
|
def test_age_in_days():
    """age_in_days counts leap days and accepts str/date/struct_time."""
    leap_year_days = 366  # 2000 was a leap year
    assert evalutils.age_in_days('2000-01-01', '2001-01-01') == leap_year_days
    assert evalutils.age_in_days(datetime.date(2000, 1, 1), '2001-01-01') == leap_year_days
    assert evalutils.age_in_days(time.struct_time((2000, 1, 1, 0, 0, 0, 0, 0, 0)), '2001-01-01') == leap_year_days
    assert evalutils.age_in_days('2001-01-01', '2002-01-01') == 365
|
|
|
|
|
|
def test_age_in_seconds():
    """A date without a time component counts from midnight."""
    one_hour = 3600
    assert evalutils.age_in_seconds('2000-01-01 00:00', '2000-01-01 01:00') == one_hour
    assert evalutils.age_in_seconds('2000-01-01', '2000-01-01 01:00') == one_hour
    assert evalutils.age_in_seconds(datetime.date(2000, 1, 1), '2000-01-01 01:00') == one_hour
    midnight = time.struct_time((2000, 1, 1, 0, 0, 0, 0, 0, 0))
    assert evalutils.age_in_seconds(midnight, '2000-01-01 01:00') == one_hour
|
|
|
|
|
|
def test_date_format():
    """date_format() follows the publisher language, then the locale env.

    French locales render dates as %d/%m/%Y while the default (and the
    Dutch locales) keep the ISO %Y-%m-%d format; an explicit publisher
    language setting overrides the environment.
    """
    pub = create_temporary_pub()
    pub.cfg['language'] = {}
    pub.write_cfg()
    orig_environ = os.environ.copy()
    try:
        # start from a locale-neutral environment
        if 'LC_TIME' in os.environ:
            del os.environ['LC_TIME']
        if 'LC_ALL' in os.environ:
            del os.environ['LC_ALL']
        assert date_format() == '%Y-%m-%d'
        os.environ['LC_ALL'] = 'nl_BE.UTF-8'
        assert date_format() == '%Y-%m-%d'
        os.environ['LC_ALL'] = 'fr_BE.UTF-8'
        assert date_format() == '%d/%m/%Y'
        # setting LC_TIME flips the format back: date_format() consults
        # LC_TIME before LC_ALL
        os.environ['LC_TIME'] = 'nl_BE.UTF-8'
        assert date_format() == '%Y-%m-%d'
        # an explicit publisher language wins over the environment
        pub.cfg['language'] = {'language': 'fr'}
        assert date_format() == '%d/%m/%Y'
    finally:
        # restore in place: rebinding os.environ to a plain dict copy
        # (the previous `os.environ = orig_environ`) would disconnect
        # os.environ from putenv()/getenv() for the rest of the process
        os.environ.clear()
        os.environ.update(orig_environ)
|
|
|
|
|
|
def test_get_as_datetime():
    """All supported datetime string formats parse to the same value."""
    create_temporary_pub()
    expected = datetime.datetime(2017, 4, 25, 12, 0)
    for value in (
        '2017-04-25 12:00',
        '2017-04-25 12:00:00',
        '2017-04-25T12:00:00Z',
        '2017-04-25T12:00:00',
        '25/04/2017 12:00',
    ):
        assert get_as_datetime(value) == expected
|
|
|
|
|
|
def test_pagination():
    # Exercise pagination_links(offset, limit, total_count): the page-number
    # links (with ellipsis gaps), the "(first-last/total)" summary and the
    # per-page selector, plus the site-options override of the maximum
    # page size.
    pub = create_temporary_pub()
    req = HTTPRequest(None, {'SERVER_NAME': 'example.net', 'SCRIPT_NAME': ''})
    req.response.filter = {}
    pub.form = {'ajax': 'true'}
    pub._set_request(req)

    def get_texts(s):
        # extract non-empty text nodes from the rendered HTML fragment
        return [x for x in re.findall(r'>(.*?)<', str(s)) if x.strip()]

    # small totals: every page gets a direct link, no ellipsis
    assert get_texts(pagination_links(0, 10, 0)) == ['1', '(0-0/0)', 'Per page: ', '10']
    assert get_texts(pagination_links(0, 10, 10)) == ['1', '(1-10/10)', 'Per page: ', '10']
    assert get_texts(pagination_links(0, 10, 20)) == ['1', '2', '(1-10/20)', 'Per page: ', '10', '20']
    assert get_texts(pagination_links(10, 10, 20)) == ['1', '2', '(11-20/20)', 'Per page: ', '10', '20']
    assert get_texts(pagination_links(10, 10, 50)) == [
        '1',
        '2',
        '3',
        '4',
        '5',
        '(11-20/50)',
        'Per page: ',
        '10',
        '20',
        '50',
    ]
    # many pages: leading pages, an ellipsis, then the last page
    assert get_texts(pagination_links(10, 10, 500)) == [
        '1',
        '2',
        '3',
        '4',
        '5',
        '6',
        '7',
        '…',
        '50',
        '(11-20/500)',
        'Per page: ',
        '10',
        '20',
        '50',
        '100',
    ]
    # current page in the middle: ellipsis on both sides
    assert get_texts(pagination_links(100, 10, 500)) == [
        '1',
        '…',
        '8',
        '9',
        '10',
        '11',
        '12',
        '13',
        '14',
        '…',
        '50',
        '(101-110/500)',
        'Per page: ',
        '10',
        '20',
        '50',
        '100',
    ]
    assert get_texts(pagination_links(100, 20, 500)) == [
        '1',
        '…',
        '3',
        '4',
        '5',
        '6',
        '7',
        '8',
        '9',
        '…',
        '25',
        '(101-120/500)',
        'Per page: ',
        '10',
        '20',
        '50',
        '100',
    ]

    # check limit
    assert '(1-10/1000)' in get_texts(pagination_links(0, 10, 1000))
    assert '(1-100/1000)' in get_texts(pagination_links(0, 100, 1000))
    assert '(1-100/1000)' in get_texts(pagination_links(0, 101, 1000))  # 100 is the max

    # new default pagination, more than 100
    # (raising default-page-size in site-options raises the cap too)
    if not pub.site_options.has_section('options'):
        pub.site_options.add_section('options')
    pub.site_options.set('options', 'default-page-size', '500')
    with open(os.path.join(pub.app_dir, 'site-options.cfg'), 'w') as fd:
        pub.site_options.write(fd)
    assert '(1-101/1000)' in get_texts(pagination_links(0, 101, 1000))
    assert '(1-500/1000)' in get_texts(pagination_links(0, 500, 1000))
    assert '(1-500/1000)' in get_texts(pagination_links(0, 501, 1000))  # 500 is the max
|
|
|
|
|
|
def test_cache():
    """The configured django cache backend round-trips values."""
    key, value = 'hello', 'world'
    cache.set(key, value)
    assert cache.get(key) == value
|
|
|
|
|
|
def test_normalize_geolocation():
    """Out-of-range latitudes/longitudes are wrapped back into range."""
    # latitude wrapping, longitude held at 0
    latitude_cases = [
        (10.0, 10.0),
        (-10.0, -10.0),
        (100.0, -80.0),
        (-100.0, 80.0),
        (180.0, 0.0),
        (-180.0, 0.0),
        (200.0, 20.0),
        (-200.0, -20.0),
    ]
    for lat_in, lat_out in latitude_cases:
        assert normalize_geolocation({'lat': lat_in, 'lon': 0.0}) == {'lat': lat_out, 'lon': 0.0}

    # longitude wrapping, latitude held at 0
    longitude_cases = [
        (10.0, 10.0),
        (-10.0, -10.0),
        (200.0, -160.0),
        (-200.0, 160.0),
        (360.0, 0.0),
        (-360.0, 0.0),
        (400.0, 40.0),
        (-400.0, -40.0),
    ]
    for lon_in, lon_out in longitude_cases:
        assert normalize_geolocation({'lat': 0.0, 'lon': lon_in}) == {'lat': 0.0, 'lon': lon_out}
|
|
|
|
|
|
def test_dict_from_prefix():
    """dict_from_prefix keeps only matching keys, with the prefix stripped."""
    assert evalutils.dict_from_prefix('var1', {}) == {}
    # an empty prefix matches every key and leaves them unchanged
    assert evalutils.dict_from_prefix('', {'k1': 'v1'}) == {'k1': 'v1'}
    source = {'k1': 'v1', 'k2': 'v2'}
    assert evalutils.dict_from_prefix('k', source) == {'1': 'v1', '2': 'v2'}
    # only keys are matched against the prefix, never the values
    assert evalutils.dict_from_prefix('v', source) == {}
|
|
|
|
|
|
def test_objects_repr():
    """Workflow statuses, status items and fields have informative reprs."""
    workflow = Workflow(name='wf')
    status = workflow.add_status('Status1', 'st1')
    jump = JumpWorkflowStatusItem()
    jump.id = '_jump'
    status.items.append(jump)

    assert 'st1' in repr(status)
    assert '_jump' in repr(jump)

    # fields repr their id and label, even before they are set
    field = StringField()
    assert repr(field) == '<StringField None None>'
    field.id = '1'
    field.label = 'test'
    assert repr(field) == "<StringField 1 'test'>"
|
|
|
|
|
|
@pytest.mark.parametrize(
    'value, length, expected',
    [
        ('', 30, ''),
        (None, 30, 'None'),
        ('foo bar', 30, 'foo bar'),
        ('01234567890123456789012345678', 30, '01234567890123456789012345678'),
        ('012345678901234567890123456789', 30, '012345678901234567890123456789'),
        ('0123456789012345678901234567890', 30, '012345678901234567890123456(…)'),
        ('foo bar', 4, 'f(…)'),
        ('foo bar', 3, 'foo'),
        ('foo bar', 2, 'fo'),
    ],
)
def test_ellipsize(value, length, expected):
    """Long values are truncated with a '(…)' marker inside the limit."""
    create_temporary_pub()
    result = ellipsize(value, length=length)
    assert result == expected
|
|
|
|
|
|
def test_criteria_repr():
    """Storage criteria reprs mention the class name and its operands."""
    criteria_repr = repr(wcs.qommon.storage.GreaterOrEqual('foo', 'bar'))
    assert 'GreaterOrEqual' in criteria_repr
    assert 'foo' in criteria_repr
    assert 'bar' in criteria_repr

    # single-operand criteria work the same way
    assert 'Null' in repr(wcs.qommon.storage.Null('foo'))
|
|
|
|
|
|
def test_related_field_repr():
    """RelatedField repr includes its own and its parent field's label."""
    from wcs.backoffice.management import RelatedField

    related = RelatedField(None, field=StringField(label='foo'), parent_field=StringField(label='bar'))
    rendered = repr(related)
    assert 'foo' in rendered
    assert 'bar' in rendered
|
|
|
|
|
|
def test_find_vc_version():
    # _find_vc_version() should fall back to `dpkg -l` output when no
    # setup.py is found, and report the installed Debian package version.
    import wcs.qommon.admin.menu

    def mocked_popen(*args, **kwargs):
        # fake subprocess handle returning canned `dpkg -l wcs` output
        class Process:
            returncode = 0

            def communicate(self):
                return (
                    b'''Desired=Unknown/Install/Remove/Purge/Hold
| Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend
|/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad)
||/ Name Version Architecture Description
+++-==============-===============-============-=================================================
ii wcs 5.71-1~eob100+1 all web application to design and set up online forms
''',
                    '',
                )

        return Process()

    with mock.patch('os.path.exists') as os_path_exists, mock.patch('subprocess.Popen') as popen:

        def mocked_os_path_exists(path):
            # pretend we are not running from a source checkout
            # (setup.py absent), so the dpkg code path is taken
            return bool(not path.endswith('setup.py'))

        os_path_exists.side_effect = mocked_os_path_exists

        # Popen is used as a context manager; the Process fake is
        # delivered through __enter__
        handle = mock.MagicMock()
        handle.__enter__.side_effect = mocked_popen
        popen.return_value = handle

        version = wcs.qommon.admin.menu._find_vc_version()
        assert version == 'wcs 5.71-1~eob100+1 (Debian)'
|
|
|
|
|
|
def test_uwsgi_spooler_import():
    # the spooler module depends on the `uwsgi` runtime module, which only
    # exists inside a uwsgi process, so importing it here must fail
    with pytest.raises(ImportError):
        import wcs.qommon.spooler  # noqa pylint: disable=unused-import
|