wcs/tests/test_datasource.py

1377 lines
53 KiB
Python

import codecs
import io
import json
import os
import urllib.parse
import xml.etree.ElementTree as ET
from unittest import mock
import pytest
from wcs import data_sources, fields
from wcs.categories import DataSourceCategory
from wcs.data_sources import NamedDataSource, register_data_source_function
from wcs.formdef import FormDef
from wcs.qommon.form import Form, get_request
from wcs.qommon.http_request import HTTPRequest
from wcs.qommon.misc import indent_xml as indent
from wcs.workflows import WorkflowStatusItem
from .test_widgets import MockHtmlForm, mock_form_submission
from .utilities import clean_temporary_pub, create_temporary_pub
@pytest.fixture
def pub(request):
    """Return a temporary publisher whose site options declare a wscall secret."""
    publisher = create_temporary_pub()
    publisher.set_app_dir(HTTPRequest(None, {'SCRIPT_NAME': '/', 'SERVER_NAME': 'example.net'}))
    site_options_path = os.path.join(publisher.app_dir, 'site-options.cfg')
    with open(site_options_path, 'w') as fd:
        fd.write(
            '''
[wscall-secrets]
api.example.com = 1234
'''
        )
    publisher.load_site_options()
    return publisher
def teardown_module(module):
    """Remove the temporary publisher once all tests in this module have run."""
    clean_temporary_pub()
@pytest.fixture
def no_request_pub(pub, request):
    # Detach the active request from the publisher so code under test runs in
    # a "no current request" context.
    pub._request = None
@pytest.fixture
def requests_pub(pub, request):
    """Attach a fresh HTTP request to the publisher and return it."""
    req = HTTPRequest(None, {'SERVER_NAME': 'example.net', 'SCRIPT_NAME': ''})
    pub._set_request(req)
    return req
@pytest.fixture
def error_email(pub):
    """Configure the publisher so errors are notified by email."""
    pub.cfg['debug'] = {'error_email': 'errors@localhost.invalid'}
    pub.write_cfg()
    pub.set_config()
def test_item_field_python_datasource(requests_pub):
    """An item field backed by a 'formula' data source renders its options
    and parses submitted values, in both edit and view forms."""
    req = get_request()
    req.environ['REQUEST_METHOD'] = 'POST'
    field = fields.ItemField()
    field.id = 1
    field.data_source = {'type': 'formula', 'value': '''[('1', 'un'), ('2', 'deux')]'''}
    form = Form()
    field.add_to_form(form)
    widget = form.get_widget('f1')
    assert widget is not None
    # options are (id, text, key) triples derived from the source tuples
    assert widget.options == [('1', 'un', '1'), ('2', 'deux', '2')]
    form = MockHtmlForm(widget)
    mock_form_submission(req, widget, {'f1': ['1']})
    assert widget.parse() == '1'
    # view form: the stored value round-trips through parse()
    form = Form()
    field.add_to_view_form(form, value='1')
    widget = form.get_widget('f1')
    form = MockHtmlForm(widget)
    mock_form_submission(req, widget)
    assert widget.parse() == '1'
def test_python_datasource(pub):
    """get_items()/get_structured_items() accept the various shapes a
    'formula' data source may evaluate to."""
    # two-item tuples: (id, text)
    source = [('1', 'foo'), ('2', 'bar')]
    ds = {'type': 'formula', 'value': repr(source)}
    assert data_sources.get_items(ds) == [
        ('1', 'foo', '1', {'id': '1', 'text': 'foo'}),
        ('2', 'bar', '2', {'id': '2', 'text': 'bar'}),
    ]
    assert data_sources.get_structured_items(ds) == [
        {'id': '1', 'text': 'foo'},
        {'id': '2', 'text': 'bar'},
    ]

    # three-item tuples: (id, text, key)
    source = [('1', 'foo', 'a'), ('2', 'bar', 'b')]
    ds = {'type': 'formula', 'value': repr(source)}
    assert data_sources.get_items(ds) == [
        ('1', 'foo', 'a', {'id': '1', 'key': 'a', 'text': 'foo'}),
        ('2', 'bar', 'b', {'id': '2', 'key': 'b', 'text': 'bar'}),
    ]

    # single-item tuples: value doubles as id, text and key
    source = [('foo',), ('bar',)]
    ds = {'type': 'formula', 'value': repr(source)}
    assert data_sources.get_items(ds) == [
        ('foo', 'foo', 'foo', {'id': 'foo', 'text': 'foo'}),
        ('bar', 'bar', 'bar', {'id': 'bar', 'text': 'bar'}),
    ]

    # bare strings behave like single-item tuples
    source = ['foo', 'bar']
    ds = {'type': 'formula', 'value': repr(source)}
    assert data_sources.get_items(ds) == [
        ('foo', 'foo', 'foo', {'id': 'foo', 'text': 'foo'}),
        ('bar', 'bar', 'bar', {'id': 'bar', 'text': 'bar'}),
    ]

    # dictionaries; disabled entries are filtered out unless requested
    source = [{'id': 'foo', 'text': 'Foo'}, {'id': 'bar', 'text': 'Bar', 'disabled': True}]
    ds = {'type': 'formula', 'value': repr(source)}
    assert data_sources.get_items(ds) == [('foo', 'Foo', 'foo', {'id': 'foo', 'text': 'Foo'})]
    assert data_sources.get_items(ds, include_disabled=True) == [
        ('foo', 'Foo', 'foo', {'id': 'foo', 'text': 'Foo'}),
        ('bar', 'Bar', 'bar', {'id': 'bar', 'text': 'Bar', 'disabled': True}),
    ]
def test_python_datasource_errors(pub, error_email, http_requests, emails, caplog):
    """Broken 'formula' data sources return no items and, depending on the
    notify/record flags, send an error email and/or record a logged error."""
    # invalid python expression
    datasource = {'type': 'formula', 'value': 'foobar', 'notify_on_errors': True, 'record_on_errors': True}
    assert data_sources.get_items(datasource) == []
    assert 'Failed to eval() Python data source' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 1
    logged_error = pub.loggederror_class.select()[0]
    assert logged_error.workflow_id is None
    assert logged_error.summary == "[DATASOURCE] Failed to eval() Python data source ('foobar')"

    # expression not iterable
    datasource = {'type': 'formula', 'value': '2', 'notify_on_errors': True, 'record_on_errors': True}
    assert data_sources.get_items(datasource) == []
    assert 'gave a non-iterable result' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 2
    logged_error = pub.loggederror_class.select(order_by='id')[1]
    assert logged_error.workflow_id is None
    assert logged_error.summary == "[DATASOURCE] Python data source ('2') gave a non-iterable result"

    # iterable of sets: iterable but not usable as items
    datasource = {
        'type': 'formula',
        'value': '[{"mairie-a-rdv", "Mairie A"}, {"mairie-b-rdv", "Mairie B"}]',
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert pub.loggederror_class.count() == 3
    logged_error = pub.loggederror_class.select(order_by='id')[2]
    assert logged_error.workflow_id is None
    assert logged_error.summary == (
        '[DATASOURCE] Python data source (\'[{"mairie-a-rdv", "Mairie A"}, {"mairie-b-rdv", "Mairie B"}]\') gave a non usable result'
    )

    # list of dictionaries but some are missing a text key
    datasource = {
        'type': 'formula',
        'value': '[{"id": "mairie-a-rdv", "text": "Mairie A"}, {"id": "mairie-b-rdv"}]',
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert pub.loggederror_class.count() == 4
    # check the fourth recorded error (the test previously re-checked the
    # third error's index and summary, leaving this case unverified)
    logged_error = pub.loggederror_class.select(order_by='id')[3]
    assert logged_error.workflow_id is None
    assert logged_error.summary == (
        '[DATASOURCE] Python data source (\'[{"id": "mairie-a-rdv", "text": "Mairie A"}, {"id": "mairie-b-rdv"}]\') gave a non usable result'
    )

    # running with python expressions globally forbidden: any formula source
    # errors out, and notification happens regardless of notify_on_errors
    if not pub.site_options.has_section('options'):
        pub.site_options.add_section('options')
    pub.site_options.set('options', 'forbid-python-expressions', 'true')
    with open(os.path.join(pub.app_dir, 'site-options.cfg'), 'w') as fd:
        pub.site_options.write(fd)
    pub.loggederror_class.wipe()
    datasource = {
        'type': 'formula',
        'value': repr(['foo', 'bar']),
        'record_on_errors': False,
        'notify_on_errors': False,
    }
    emails.empty()
    assert data_sources.get_items(datasource) == []
    assert emails.count() == 1  # notified even with notify_on_errors set to False
    assert 'Unauthorized Python Usage' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 1
    logged_error = pub.loggederror_class.select(order_by='latest_occurence_timestamp')[-1]
    assert logged_error.workflow_id is None
    assert logged_error.summary == 'Unauthorized Python Usage'
def test_python_datasource_with_evalutils(pub):
    """Formula data sources can call evalutils helpers such as date()."""
    entries = [
        {'id': 'foo', 'text': 'Foo', 'value': '2017-01-01'},
        {'id': 'bar', 'text': 'Bar', 'value': '2015-01-01'},
    ]
    ds = {
        'type': 'formula',
        'value': '[x for x in %s if date(x["value"]) > date("2016-01-01")]' % repr(entries),
    }
    # only the entry dated after 2016-01-01 survives the filter
    assert data_sources.get_items(ds) == [
        ('foo', 'Foo', 'foo', {'id': 'foo', 'text': 'Foo', 'value': '2017-01-01'})
    ]
def test_json_datasource(pub, requests_pub, http_requests):
    """Exercise 'json' data sources: invalid/unrelated payloads yield no
    items, valid payloads are normalised, URLs may be templated, and the
    data/id/text attributes can be remapped."""
    get_request().datasources_cache = {}
    datasource = {'type': 'json', 'value': ''}
    assert data_sources.get_items(datasource) == []
    # missing file
    get_request().datasources_cache = {}
    json_file_path = os.path.join(pub.app_dir, 'test.json')
    datasource = {'type': 'json', 'value': 'file://%s' % json_file_path}
    assert data_sources.get_items(datasource) == []
    # invalid json file
    get_request().datasources_cache = {}
    with open(json_file_path, 'wb') as json_file:
        # zlib-compressed bytes are not parseable as JSON
        json_file.write(codecs.encode(b'foobar', 'zlib_codec'))
    assert data_sources.get_items(datasource) == []
    # empty json file
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({}, json_file)
    assert data_sources.get_items(datasource) == []
    # unrelated json file
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump('foobar', json_file)
    assert data_sources.get_items(datasource) == []
    # another unrelated json file
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': 'foobar'}, json_file)
    assert data_sources.get_items(datasource) == []
    # json file not using dictionaries
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': [['1', 'foo'], ['2', 'bar']]}, json_file)
    assert data_sources.get_items(datasource) == []
    # a good json file
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': [{'id': '1', 'text': 'foo'}, {'id': '2', 'text': 'bar'}]}, json_file)
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': '1', 'text': 'foo'}),
        ('2', 'bar', '2', {'id': '2', 'text': 'bar'}),
    ]
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo'},
        {'id': '2', 'text': 'bar'},
    ]
    # a json file with additional keys: extra keys are kept in the item dict
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump(
            {'data': [{'id': '1', 'text': 'foo', 'more': 'xxx'}, {'id': '2', 'text': 'bar', 'more': 'yyy'}]},
            json_file,
        )
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': '1', 'text': 'foo', 'more': 'xxx'}),
        ('2', 'bar', '2', {'id': '2', 'text': 'bar', 'more': 'yyy'}),
    ]
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo', 'more': 'xxx'},
        {'id': '2', 'text': 'bar', 'more': 'yyy'},
    ]
    # json specified with a variadic url
    get_request().datasources_cache = {}

    class JsonUrlPath:
        # substitution source providing the [json_url] variable
        def get_substitution_variables(self):
            return {'json_url': 'file://%s' % json_file_path}

    pub.substitutions.feed(JsonUrlPath())
    datasource = {'type': 'json', 'value': '[json_url]'}
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': '1', 'text': 'foo', 'more': 'xxx'}),
        ('2', 'bar', '2', {'id': '2', 'text': 'bar', 'more': 'yyy'}),
    ]
    # same with django templated url
    get_request().datasources_cache = {}
    pub.substitutions.feed(JsonUrlPath())
    datasource = {'type': 'json', 'value': '{{ json_url }}'}
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': '1', 'text': 'foo', 'more': 'xxx'}),
        ('2', 'bar', '2', {'id': '2', 'text': 'bar', 'more': 'yyy'}),
    ]
    # json specified with a variadic url with an erroneous space
    get_request().datasources_cache = {}
    pub.substitutions.feed(JsonUrlPath())
    datasource = {'type': 'json', 'value': ' [json_url]'}
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': '1', 'text': 'foo', 'more': 'xxx'}),
        ('2', 'bar', '2', {'id': '2', 'text': 'bar', 'more': 'yyy'}),
    ]
    # same with django templated url
    get_request().datasources_cache = {}
    pub.substitutions.feed(JsonUrlPath())
    datasource = {'type': 'json', 'value': ' {{ json_url }}'}
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': '1', 'text': 'foo', 'more': 'xxx'}),
        ('2', 'bar', '2', {'id': '2', 'text': 'bar', 'more': 'yyy'}),
    ]
    # a json file with integer as 'id': ids are stringified in the tuples but
    # kept as-is in the structured items
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': [{'id': 1, 'text': 'foo'}, {'id': 2, 'text': 'bar'}]}, json_file)
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': 1, 'text': 'foo'}),
        ('2', 'bar', '2', {'id': 2, 'text': 'bar'}),
    ]
    assert data_sources.get_structured_items(datasource) == [
        {'id': 1, 'text': 'foo'},
        {'id': 2, 'text': 'bar'},
    ]
    # a json file with empty or no text values: missing text falls back to id
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': [{'id': '1', 'text': ''}, {'id': '2'}]}, json_file)
    assert data_sources.get_items(datasource) == [
        ('1', '', '1', {'id': '1', 'text': ''}),
        ('2', '2', '2', {'id': '2', 'text': '2'}),
    ]
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': ''},
        {'id': '2', 'text': '2'},
    ]
    # a json file with empty or no id: such entries are dropped entirely
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': [{'id': '', 'text': 'foo'}, {'text': 'bar'}, {'id': None}]}, json_file)
    assert data_sources.get_items(datasource) == []
    assert data_sources.get_structured_items(datasource) == []
    # a json file with invalid datatype for the text entry, (list in text key),
    # the invalid entry will be skipped
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': [{'id': '1', 'text': ['foo']}, {'id': '2', 'text': 'bar'}]}, json_file)
    assert data_sources.get_items(datasource) == [('2', 'bar', '2', {'id': '2', 'text': 'bar'})]
    assert data_sources.get_structured_items(datasource) == [{'id': '2', 'text': 'bar'}]
    # specify data_attribute: items are read from that key instead of 'data'
    datasource = {'type': 'json', 'value': ' {{ json_url }}', 'data_attribute': 'results'}
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'results': [{'id': '1', 'text': 'foo'}, {'id': '2', 'text': 'bar'}]}, json_file)
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo'},
        {'id': '2', 'text': 'bar'},
    ]
    datasource = {'type': 'json', 'value': ' {{ json_url }}', 'data_attribute': 'data'}
    get_request().datasources_cache = {}
    assert data_sources.get_structured_items(datasource) == []
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': {'results': [{'id': '1', 'text': 'foo'}, {'id': '2', 'text': 'bar'}]}}, json_file)
    assert data_sources.get_structured_items(datasource) == []
    # dotted data_attribute traverses nested dictionaries
    datasource = {'type': 'json', 'value': ' {{ json_url }}', 'data_attribute': 'data.results'}
    get_request().datasources_cache = {}
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo'},
        {'id': '2', 'text': 'bar'},
    ]
    # specify id_attribute: 'id' is filled from the named key
    datasource = {'type': 'json', 'value': ' {{ json_url }}', 'id_attribute': 'pk'}
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': [{'pk': '1', 'text': 'foo'}, {'pk': '2', 'text': 'bar'}]}, json_file)
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo', 'pk': '1'},
        {'id': '2', 'text': 'bar', 'pk': '2'},
    ]
    datasource = {'type': 'json', 'value': ' {{ json_url }}', 'id_attribute': 'id'}
    get_request().datasources_cache = {}
    assert data_sources.get_structured_items(datasource) == []
    # specify text_attribute: 'text' is filled from the named key
    datasource = {'type': 'json', 'value': ' {{ json_url }}', 'text_attribute': 'label'}
    get_request().datasources_cache = {}
    with open(json_file_path, 'w') as json_file:
        json.dump({'data': [{'id': '1', 'label': 'foo'}, {'id': '2', 'label': 'bar'}]}, json_file)
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo', 'label': 'foo'},
        {'id': '2', 'text': 'bar', 'label': 'bar'},
    ]
    datasource = {'type': 'json', 'value': ' {{ json_url }}', 'text_attribute': 'text'}
    get_request().datasources_cache = {}
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': '1', 'label': 'foo'},
        {'id': '2', 'text': '2', 'label': 'bar'},
    ]
def test_json_datasource_bad_url(pub, error_email, http_requests, emails):
    """HTTP failures on 'json' sources notify/record only when the
    corresponding flags are set."""
    # flags unset: silent failure, no email
    datasource = {'type': 'json', 'value': 'http://remote.example.net/404'}
    assert data_sources.get_items(datasource) == []
    assert emails.count() == 0
    # HTTP 404
    datasource = {
        'type': 'json',
        'value': 'http://remote.example.net/404',
        'notify_on_errors': True,
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert emails.count() == 1
    assert 'error in HTTP request to http://remote.example.net/404 (status: 404)' in emails.get_latest(
        'subject'
    )
    assert pub.loggederror_class.count() == 1
    logged_error = pub.loggederror_class.select()[0]
    assert logged_error.workflow_id is None
    assert (
        logged_error.summary
        == "[DATASOURCE] Error loading JSON data source (error in HTTP request to http://remote.example.net/404 (status: 404))"
    )
    # response is not JSON
    datasource = {
        'type': 'json',
        'value': 'http://remote.example.net/xml',
        'notify_on_errors': True,
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert emails.count() == 2
    assert 'Error reading JSON data source' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 2
    logged_error = pub.loggederror_class.select(order_by='id')[1]
    assert logged_error.workflow_id is None
    assert (
        logged_error.summary
        == "[DATASOURCE] Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))"
    )
    # connection error
    datasource = {
        'type': 'json',
        'value': 'http://remote.example.net/connection-error',
        'notify_on_errors': True,
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert 'Error loading JSON data source' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 3
    logged_error = pub.loggederror_class.select(order_by='id')[2]
    assert logged_error.workflow_id is None
    assert logged_error.summary == "[DATASOURCE] Error loading JSON data source (error)"
    # JSON payload carrying an application-level error
    datasource = {
        'type': 'json',
        'value': 'http://remote.example.net/json-list-err1',
        'notify_on_errors': True,
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert 'Error reading JSON data source output (err 1)' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 4
    logged_error = pub.loggederror_class.select(order_by='id')[3]
    assert logged_error.workflow_id is None
    assert logged_error.summary == "[DATASOURCE] Error reading JSON data source output (err 1)"
def test_json_datasource_bad_url_scheme(pub, error_email, emails):
    """URLs with unsupported or missing schemes are reported as errors;
    an empty value stays silent."""
    datasource = {'type': 'json', 'value': '', 'notify_on_errors': True, 'record_on_errors': True}
    assert data_sources.get_items(datasource) == []
    assert emails.count() == 0
    assert pub.loggederror_class.count() == 0
    # unknown scheme
    datasource = {'type': 'json', 'value': 'foo://bar', 'notify_on_errors': True, 'record_on_errors': True}
    assert data_sources.get_items(datasource) == []
    assert 'Error loading JSON data source' in emails.get_latest('subject')
    assert 'invalid scheme in URL' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 1
    logged_error = pub.loggederror_class.select()[0]
    assert logged_error.workflow_id is None
    assert (
        logged_error.summary
        == "[DATASOURCE] Error loading JSON data source (invalid scheme in URL foo://bar)"
    )
    # plain path, no scheme at all
    datasource = {'type': 'json', 'value': '/bla/blo', 'notify_on_errors': True, 'record_on_errors': True}
    assert data_sources.get_items(datasource) == []
    assert 'Error loading JSON data source' in emails.get_latest('subject')
    assert 'invalid scheme in URL' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 2
    logged_error = pub.loggederror_class.select(order_by='id')[1]
    assert logged_error.workflow_id is None
    assert (
        logged_error.summary == "[DATASOURCE] Error loading JSON data source (invalid scheme in URL /bla/blo)"
    )
@pytest.mark.parametrize('notify', [True, False])
@pytest.mark.parametrize('record', [True, False])
def test_json_datasource_bad_qs_data(pub, error_email, emails, notify, record):
    """A query-string parameter whose template fails to render is dropped
    from the URL; the failure is notified/recorded per the flags, while the
    remaining parameters and the data source itself still work."""
    datasource = {
        'type': 'json',
        'value': "https://whatever.com/json",
        'qs_data': {'foo': '{% for invalid %}', 'bar': '{{ valid }}'},
        'notify_on_errors': notify,
        'record_on_errors': record,
    }
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': [{'id': '1', 'text': 'foo'}]}))
        assert data_sources.get_items(datasource) == [('1', 'foo', '1', {'id': '1', 'text': 'foo'})]
        url = urlopen.call_args[0][0]
        # 'foo' is omitted (broken template); 'bar' rendered to empty string
        assert url == 'https://whatever.com/json?bar='
    message = '[DATASOURCE] Failed to compute value "{% for invalid %}" for "foo" query parameter'
    if notify:
        assert emails.count() == 1
        assert message in emails.get_latest('subject')
    else:
        assert emails.count() == 0
    if record:
        assert pub.loggederror_class.count() == 1
        logged_error = pub.loggederror_class.select(order_by='id')[0]
        assert logged_error.summary == message
    else:
        assert pub.loggederror_class.count() == 0
def test_geojson_datasource(pub, requests_pub, http_requests):
    """Exercise 'geojson' data sources: items come from feature properties,
    URLs may be templated, and id/label extraction can be customised."""
    get_request()
    get_request().datasources_cache = {}
    datasource = {'type': 'geojson', 'value': ''}
    assert data_sources.get_items(datasource) == []
    # missing file
    get_request().datasources_cache = {}
    geojson_file_path = os.path.join(pub.app_dir, 'test.geojson')
    datasource = {'type': 'geojson', 'value': 'file://%s' % geojson_file_path}
    assert data_sources.get_items(datasource) == []
    # invalid geojson file
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'wb') as geojson_file:
        # zlib-compressed bytes are not parseable as JSON
        geojson_file.write(codecs.encode(b'foobar', 'zlib_codec'))
    assert data_sources.get_items(datasource) == []
    # empty geojson file
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump({}, geojson_file)
    assert data_sources.get_items(datasource) == []
    # unrelated geojson file
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump('foobar', geojson_file)
    assert data_sources.get_items(datasource) == []
    # another unrelated geojson file
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump({'features': 'foobar'}, geojson_file)
    assert data_sources.get_items(datasource) == []
    # a good geojson file
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {
                'features': [
                    {'properties': {'id': '1', 'text': 'foo'}},
                    {'properties': {'id': '2', 'text': 'bar'}},
                ]
            },
            geojson_file,
        )
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': '1', 'text': 'foo', 'properties': {'id': '1', 'text': 'foo'}}),
        ('2', 'bar', '2', {'id': '2', 'text': 'bar', 'properties': {'id': '2', 'text': 'bar'}}),
    ]
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo', 'properties': {'id': '1', 'text': 'foo'}},
        {'id': '2', 'text': 'bar', 'properties': {'id': '2', 'text': 'bar'}},
    ]
    # a geojson file with additional keys: kept inside 'properties'
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {
                'features': [
                    {'properties': {'id': '1', 'text': 'foo', 'more': 'xxx'}},
                    {'properties': {'id': '2', 'text': 'bar', 'more': 'yyy'}},
                ]
            },
            geojson_file,
        )
    assert data_sources.get_items(datasource) == [
        (
            '1',
            'foo',
            '1',
            {'id': '1', 'text': 'foo', 'properties': {'id': '1', 'text': 'foo', 'more': 'xxx'}},
        ),
        (
            '2',
            'bar',
            '2',
            {'id': '2', 'text': 'bar', 'properties': {'id': '2', 'text': 'bar', 'more': 'yyy'}},
        ),
    ]
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo', 'properties': {'id': '1', 'text': 'foo', 'more': 'xxx'}},
        {'id': '2', 'text': 'bar', 'properties': {'id': '2', 'text': 'bar', 'more': 'yyy'}},
    ]
    # geojson specified with a variadic url
    get_request().datasources_cache = {}

    class GeoJSONUrlPath:
        # substitution source providing the [geojson_url] variable
        def get_substitution_variables(self):
            return {'geojson_url': 'file://%s' % geojson_file_path}

    pub.substitutions.feed(GeoJSONUrlPath())
    datasource = {'type': 'geojson', 'value': '[geojson_url]'}
    assert data_sources.get_items(datasource) == [
        (
            '1',
            'foo',
            '1',
            {'id': '1', 'text': 'foo', 'properties': {'id': '1', 'text': 'foo', 'more': 'xxx'}},
        ),
        (
            '2',
            'bar',
            '2',
            {'id': '2', 'text': 'bar', 'properties': {'id': '2', 'text': 'bar', 'more': 'yyy'}},
        ),
    ]
    # same with django templated url
    get_request().datasources_cache = {}
    pub.substitutions.feed(GeoJSONUrlPath())
    datasource = {'type': 'geojson', 'value': '{{ geojson_url }}'}
    assert data_sources.get_items(datasource) == [
        (
            '1',
            'foo',
            '1',
            {'id': '1', 'text': 'foo', 'properties': {'id': '1', 'text': 'foo', 'more': 'xxx'}},
        ),
        (
            '2',
            'bar',
            '2',
            {'id': '2', 'text': 'bar', 'properties': {'id': '2', 'text': 'bar', 'more': 'yyy'}},
        ),
    ]
    # geojson specified with a variadic url with an erroneous space
    get_request().datasources_cache = {}
    pub.substitutions.feed(GeoJSONUrlPath())
    datasource = {'type': 'geojson', 'value': ' [geojson_url]'}
    assert data_sources.get_items(datasource) == [
        (
            '1',
            'foo',
            '1',
            {'id': '1', 'text': 'foo', 'properties': {'id': '1', 'text': 'foo', 'more': 'xxx'}},
        ),
        (
            '2',
            'bar',
            '2',
            {'id': '2', 'text': 'bar', 'properties': {'id': '2', 'text': 'bar', 'more': 'yyy'}},
        ),
    ]
    # same with django templated url
    get_request().datasources_cache = {}
    pub.substitutions.feed(GeoJSONUrlPath())
    datasource = {'type': 'geojson', 'value': ' {{ geojson_url }}'}
    assert data_sources.get_items(datasource) == [
        (
            '1',
            'foo',
            '1',
            {'id': '1', 'text': 'foo', 'properties': {'id': '1', 'text': 'foo', 'more': 'xxx'}},
        ),
        (
            '2',
            'bar',
            '2',
            {'id': '2', 'text': 'bar', 'properties': {'id': '2', 'text': 'bar', 'more': 'yyy'}},
        ),
    ]
    # a geojson file with integer as 'id': ids are stringified in the tuples
    # but kept as-is in the structured items
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {
                'features': [
                    {'properties': {'id': 1, 'text': 'foo'}},
                    {'properties': {'id': 2, 'text': 'bar'}},
                ]
            },
            geojson_file,
        )
    assert data_sources.get_items(datasource) == [
        ('1', 'foo', '1', {'id': 1, 'text': 'foo', 'properties': {'id': 1, 'text': 'foo'}}),
        ('2', 'bar', '2', {'id': 2, 'text': 'bar', 'properties': {'id': 2, 'text': 'bar'}}),
    ]
    assert data_sources.get_structured_items(datasource) == [
        {'id': 1, 'text': 'foo', 'properties': {'id': 1, 'text': 'foo'}},
        {'id': 2, 'text': 'bar', 'properties': {'id': 2, 'text': 'bar'}},
    ]
    # a geojson file with empty or no text values: text falls back to the id
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {'features': [{'properties': {'id': '1', 'text': ''}}, {'properties': {'id': '2'}}]}, geojson_file
        )
    assert data_sources.get_items(datasource) == [
        ('1', '1', '1', {'id': '1', 'text': '1', 'properties': {'id': '1', 'text': ''}}),
        ('2', '2', '2', {'id': '2', 'text': '2', 'properties': {'id': '2'}}),
    ]
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': '1', 'properties': {'id': '1', 'text': ''}},
        {'id': '2', 'text': '2', 'properties': {'id': '2'}},
    ]
    # a geojson file with empty or no id: such features are dropped entirely
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {
                'features': [
                    {'properties': {'id': '', 'text': 'foo'}},
                    {'properties': {'text': 'bar'}},
                    {'properties': {'id': None}},
                ]
            },
            geojson_file,
        )
    assert data_sources.get_items(datasource) == []
    assert data_sources.get_structured_items(datasource) == []
    # specify id_property: the id is read from the named property
    datasource = {'type': 'geojson', 'value': ' {{ geojson_url }}', 'id_property': 'gid'}
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {
                'features': [
                    {'properties': {'gid': '1', 'text': 'foo'}},
                    {'properties': {'gid': '2', 'text': 'bar'}},
                ]
            },
            geojson_file,
        )
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo', 'properties': {'gid': '1', 'text': 'foo'}},
        {'id': '2', 'text': 'bar', 'properties': {'gid': '2', 'text': 'bar'}},
    ]
    # check with missing id property
    datasource = {'type': 'geojson', 'value': ' {{ geojson_url }}', 'id_property': 'id'}
    get_request().datasources_cache = {}
    assert data_sources.get_structured_items(datasource) == []
    # check with feature IDs (feature-level 'id', outside 'properties')
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {
                'features': [
                    {'id': '1', 'properties': {'text': 'foo'}},
                    {'id': '2', 'properties': {'text': 'bar'}},
                ]
            },
            geojson_file,
        )
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'foo', 'properties': {'text': 'foo'}},
        {'id': '2', 'text': 'bar', 'properties': {'text': 'bar'}},
    ]
    # specify label_template_property: labels rendered as a django template
    datasource = {
        'type': 'geojson',
        'value': ' {{ geojson_url }}',
        'label_template_property': '{{ id }}: {{ text }}',
    }
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {
                'features': [
                    {'properties': {'id': '1', 'text': 'foo'}},
                    {'properties': {'id': '2', 'text': 'bar'}},
                ]
            },
            geojson_file,
        )
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': '1: foo', 'properties': {'id': '1', 'text': 'foo'}},
        {'id': '2', 'text': '2: bar', 'properties': {'id': '2', 'text': 'bar'}},
    ]
    # wrong template: the template string is used verbatim
    get_request().datasources_cache = {}
    datasource = {'type': 'geojson', 'value': ' {{ geojson_url }}', 'label_template_property': '{{ text }'}
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': '{{ text }', 'properties': {'id': '1', 'text': 'foo'}},
        {'id': '2', 'text': '{{ text }', 'properties': {'id': '2', 'text': 'bar'}},
    ]
    get_request().datasources_cache = {}
    datasource = {'type': 'geojson', 'value': ' {{ geojson_url }}', 'label_template_property': 'text'}
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': 'text', 'properties': {'id': '1', 'text': 'foo'}},
        {'id': '2', 'text': 'text', 'properties': {'id': '2', 'text': 'bar'}},
    ]
    # unknown property or empty value: label falls back to the id
    datasource = {'type': 'geojson', 'value': ' {{ geojson_url }}', 'label_template_property': '{{ label }}'}
    get_request().datasources_cache = {}
    with open(geojson_file_path, 'w') as geojson_file:
        json.dump(
            {
                'features': [
                    {'properties': {'id': '1', 'text': 'foo', 'label': ''}},
                    {'properties': {'id': '2', 'text': 'bar'}},
                ]
            },
            geojson_file,
        )
    assert data_sources.get_structured_items(datasource) == [
        {'id': '1', 'text': '1', 'properties': {'id': '1', 'text': 'foo', 'label': ''}},
        {'id': '2', 'text': '2', 'properties': {'id': '2', 'text': 'bar'}},
    ]
def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
    """HTTP failures on 'geojson' sources notify and record errors (the
    underlying loader reports them as JSON data source errors)."""
    # HTTP 404
    datasource = {
        'type': 'geojson',
        'value': 'http://remote.example.net/404',
        'notify_on_errors': True,
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert 'Error loading JSON data source' in emails.get_latest('subject')
    assert 'status: 404' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 1
    logged_error = pub.loggederror_class.select()[0]
    assert logged_error.workflow_id is None
    assert (
        logged_error.summary
        == "[DATASOURCE] Error loading JSON data source (error in HTTP request to http://remote.example.net/404 (status: 404))"
    )
    # response is not JSON
    datasource = {
        'type': 'geojson',
        'value': 'http://remote.example.net/xml',
        'notify_on_errors': True,
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert 'Error reading JSON data source output' in emails.get_latest('subject')
    assert 'Expecting value:' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 2
    logged_error = pub.loggederror_class.select(order_by='id')[1]
    assert logged_error.workflow_id is None
    assert (
        logged_error.summary
        == "[DATASOURCE] Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))"
    )
    # connection error
    datasource = {
        'type': 'geojson',
        'value': 'http://remote.example.net/connection-error',
        'notify_on_errors': True,
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert 'Error loading JSON data source' in emails.get_latest('subject')
    assert 'error' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 3
    logged_error = pub.loggederror_class.select(order_by='id')[2]
    assert logged_error.workflow_id is None
    assert logged_error.summary == "[DATASOURCE] Error loading JSON data source (error)"
    # JSON payload carrying an application-level error
    datasource = {
        'type': 'geojson',
        'value': 'http://remote.example.net/json-list-err1',
        'notify_on_errors': True,
        'record_on_errors': True,
    }
    assert data_sources.get_items(datasource) == []
    assert 'Error reading JSON data source output (err 1)' in emails.get_latest('subject')
    assert pub.loggederror_class.count() == 4
    logged_error = pub.loggederror_class.select(order_by='id')[3]
    assert logged_error.workflow_id is None
    assert logged_error.summary == "[DATASOURCE] Error reading JSON data source output (err 1)"
def test_geojson_datasource_bad_url_scheme(pub, error_email, emails):
    """Geojson sources with empty or invalid URLs yield no items and report errors."""
    # an empty URL is silently ignored: no items, no notification at all
    assert data_sources.get_items({'type': 'geojson', 'value': ''}) == []
    assert emails.count() == 0

    # invalid scheme and non-absolute URLs are notified by email and recorded
    for index, bad_url in enumerate(['foo://bar', '/bla/blo']):
        datasource = {
            'type': 'geojson',
            'value': bad_url,
            'notify_on_errors': True,
            'record_on_errors': True,
        }
        assert data_sources.get_items(datasource) == []
        assert 'Error loading JSON data source' in emails.get_latest('subject')
        assert 'invalid scheme in URL' in emails.get_latest('subject')
        assert pub.loggederror_class.count() == index + 1
        logged_error = pub.loggederror_class.select(order_by='id')[index]
        assert logged_error.workflow_id is None
        assert logged_error.summary == (
            "[DATASOURCE] Error loading JSON data source (invalid scheme in URL %s)" % bad_url
        )
def test_item_field_named_python_datasource(requests_pub):
    """An item field can reference a stored NamedDataSource by its slug."""
    NamedDataSource.wipe()
    named_source = NamedDataSource(name='foobar')
    named_source.data_source = {'type': 'formula', 'value': repr([('1', 'un'), ('2', 'deux')])}
    named_source.store()

    field = fields.ItemField()
    field.id = 1
    # reference the named data source stored above by its slug
    field.data_source = {'type': 'foobar'}

    form = Form()
    field.add_to_form(form)
    widget = form.get_widget('f1')
    assert widget is not None
    assert widget.options == [('1', 'un', '1'), ('2', 'deux', '2')]
def test_register_data_source_function(pub):
    """A function registered with register_data_source_function is callable from formulas."""

    def xxx():
        return [('1', 'foo'), ('2', 'bar')]

    register_data_source_function(xxx)
    datasource = {'type': 'formula', 'value': 'xxx()'}
    structured = [
        {'id': '1', 'text': 'foo'},
        {'id': '2', 'text': 'bar'},
    ]
    # get_items() wraps each entry as (id, text, key, structured-dict)
    assert data_sources.get_items(datasource) == [
        (item['id'], item['text'], item['id'], item) for item in structured
    ]
    assert data_sources.get_structured_items(datasource) == structured
def test_data_source_substitution_variables(pub):
    """Named data sources are exposed as data_source.<slug> substitution variables."""
    NamedDataSource.wipe()
    named_source = NamedDataSource(name='foobar')
    named_source.data_source = {'type': 'formula', 'value': repr(['un', 'deux'])}
    named_source.store()
    variables = pub.substitutions.get_context_variables()
    assert variables.get('data_source').foobar == [
        {'id': 'un', 'text': 'un'},
        {'id': 'deux', 'text': 'deux'},
    ]
def test_data_source_slug_name(pub):
    """The slug is derived from the name, with spaces turned into underscores."""
    NamedDataSource.wipe()
    named_source = NamedDataSource(name='foo bar')
    named_source.store()
    assert named_source.slug == 'foo_bar'
def test_data_source_new_id(pub):
    """Ids keep increasing across removals; a wipe restarts numbering from 1."""
    NamedDataSource.wipe()
    first = NamedDataSource(name='foo bar')
    first.store()
    assert first.id == '1'

    second = NamedDataSource(name='foo bar2')
    second.store()
    assert second.id == '2'

    # removing a data source must not free its id for reuse
    second.remove_self()
    third = NamedDataSource(name='foo bar3')
    third.store()
    assert third.id == '3'

    # wiping everything resets the id sequence
    NamedDataSource.wipe()
    fourth = NamedDataSource(name='foo bar4')
    fourth.store()
    assert fourth.id == '1'
def test_optional_item_field_with_data_source(requests_pub):
    """A non-required item field backed by a named data source still lists all options."""
    NamedDataSource.wipe()
    named_source = NamedDataSource(name='foobar')
    named_source.data_source = {'type': 'formula', 'value': repr([('1', 'un'), ('2', 'deux')])}
    named_source.store()

    field = fields.ItemField()
    field.id = 1
    field.required = False
    # reference the named data source stored above by its slug
    field.data_source = {'type': 'foobar'}

    form = Form()
    field.add_to_form(form)
    widget = form.get_widget('f1')
    assert widget is not None
    assert widget.options == [('1', 'un', '1'), ('2', 'deux', '2')]
def test_data_source_unicode(pub):
    """Non-ASCII values survive store/reload for both formula and JSON sources."""
    # formula source holding accented strings
    NamedDataSource.wipe()
    named_source = NamedDataSource(name='foobar')
    named_source.data_source = {'type': 'formula', 'value': "['uné', 'deux']"}
    named_source.store()
    reloaded = NamedDataSource.select()[0]
    assert reloaded.data_source == named_source.data_source
    assert data_sources.get_items({'type': 'foobar'}) == [
        ('uné', 'uné', 'uné', {'id': 'uné', 'text': 'uné'}),
        ('deux', 'deux', 'deux', {'id': 'deux', 'text': 'deux'}),
    ]

    # JSON source returning accented strings
    NamedDataSource.wipe()
    named_source = NamedDataSource(name='foobar')
    named_source.data_source = {'type': 'json', 'value': "https://whatever.com/json"}
    named_source.store()
    reloaded = NamedDataSource.select()[0]
    assert reloaded.data_source == named_source.data_source
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO(
            '{"data": [{"id": 0, "text": "zéro"}, {"id": 1, "text": "uné"}, {"id": 2, "text": "deux"}]}'
        )
        assert data_sources.get_items({'type': 'foobar'}) == [
            ('0', 'zéro', '0', {"id": 0, "text": "zéro"}),
            ('1', 'uné', '1', {"id": 1, "text": "uné"}),
            ('2', 'deux', '2', {"id": 2, "text": "deux"}),
        ]
@pytest.mark.parametrize('qs_data', [{}, {'arg1': 'val1', 'arg2': 'val2'}])
def test_data_source_signed(no_request_pub, qs_data):
    """JSON data source URLs are signed only for hosts with a configured secret.

    The pub fixture's site-options.cfg declares a [wscall-secrets] entry for
    api.example.com, so requests to that host must carry signature query
    parameters (algo/orig/nonce/timestamp/signature); other hosts are called
    with the URL left unsigned.  The parametrized qs_data extra parameters
    must be appended in every case.
    """
    NamedDataSource.wipe()
    data_source = NamedDataSource(name='foobar')
    data_source.data_source = {'type': 'json', 'value': "https://api.example.com/json"}
    data_source.qs_data = qs_data
    data_source.store()
    # host with a secret, URL without existing query string: signed
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO('{"data": [{"id": 0, "text": "zero"}]}')
        assert len(data_sources.get_items({'type': 'foobar'})) == 1
        signed_url = urlopen.call_args[0][0]
        assert signed_url.startswith('https://api.example.com/json?')
        parsed = urllib.parse.urlparse(signed_url)
        querystring = urllib.parse.parse_qs(parsed.query)
        # stupid simple (but sufficient) signature test:
        assert querystring['algo'] == ['sha256']
        assert querystring['orig'] == ['example.net']
        assert querystring['nonce'][0]
        assert querystring['timestamp'][0]
        assert querystring['signature'][0]
        if qs_data:
            assert querystring['arg1'][0] == 'val1'
            assert querystring['arg2'][0] == 'val2'
    # host with a secret, URL with an existing query string: signed and the
    # original parameter is preserved
    data_source.data_source = {'type': 'json', 'value': "https://api.example.com/json?foo=bar"}
    data_source.store()
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO('{"data": [{"id": 0, "text": "zero"}]}')
        assert len(data_sources.get_items({'type': 'foobar'})) == 1
        signed_url = urlopen.call_args[0][0]
        assert signed_url.startswith('https://api.example.com/json?')
        parsed = urllib.parse.urlparse(signed_url)
        querystring = urllib.parse.parse_qs(parsed.query)
        assert querystring['algo'] == ['sha256']
        assert querystring['orig'] == ['example.net']
        assert querystring['nonce'][0]
        assert querystring['timestamp'][0]
        assert querystring['signature'][0]
        assert querystring['foo'][0] == 'bar'
        if qs_data:
            assert querystring['arg1'][0] == 'val1'
            assert querystring['arg2'][0] == 'val2'
    # host without a secret: no signature, only qs_data is appended
    data_source.data_source = {'type': 'json', 'value': "https://no-secret.example.com/json"}
    data_source.store()
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO('{"data": [{"id": 0, "text": "zero"}]}')
        assert len(data_sources.get_items({'type': 'foobar'})) == 1
        unsigned_url = urlopen.call_args[0][0]
        if qs_data:
            assert unsigned_url == 'https://no-secret.example.com/json?arg1=val1&arg2=val2'
        else:
            assert unsigned_url == 'https://no-secret.example.com/json'
    # host without a secret, URL with an existing query string: untouched
    # apart from the qs_data parameters
    data_source.data_source = {'type': 'json', 'value': "https://no-secret.example.com/json?foo=bar"}
    data_source.store()
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO('{"data": [{"id": 0, "text": "zero"}]}')
        assert len(data_sources.get_items({'type': 'foobar'})) == 1
        unsigned_url = urlopen.call_args[0][0]
        if qs_data:
            assert unsigned_url == 'https://no-secret.example.com/json?foo=bar&arg1=val1&arg2=val2'
        else:
            assert unsigned_url == 'https://no-secret.example.com/json?foo=bar'
def test_named_datasource_json_cache(requests_pub):
    """cache_duration controls whether JSON data source fetches are re-issued.

    Without cache_duration, clearing the per-request cache forces a new HTTP
    request each time (urlopen call count grows); once cache_duration is set,
    a later call is served from the cache and urlopen is not called again.
    """
    NamedDataSource.wipe()
    datasource = NamedDataSource(name='foobar')
    datasource.data_source = {'type': 'json', 'value': 'http://whatever/'}
    datasource.store()
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO(
            json.dumps({'data': [{'id': '1', 'text': 'foo'}, {'id': '2', 'text': 'bar'}]})
        )
        # first call fetches from the remote URL
        assert data_sources.get_structured_items({'type': 'foobar'}) == [
            {'id': '1', 'text': 'foo'},
            {'id': '2', 'text': 'bar'},
        ]
        assert urlopen.call_count == 1
        # with the per-request cache cleared and no cache_duration, the URL
        # is fetched again
        get_request().datasources_cache = {}
        assert data_sources.get_structured_items({'type': 'foobar'}) == [
            {'id': '1', 'text': 'foo'},
            {'id': '2', 'text': 'bar'},
        ]
        assert urlopen.call_count == 2

        datasource.cache_duration = '60'
        datasource.store()
        # will cache
        get_request().datasources_cache = {}
        assert data_sources.get_structured_items({'type': 'foobar'}) == [
            {'id': '1', 'text': 'foo'},
            {'id': '2', 'text': 'bar'},
        ]
        assert urlopen.call_count == 3
        # will get from cache
        get_request().datasources_cache = {}
        assert data_sources.get_structured_items({'type': 'foobar'}) == [
            {'id': '1', 'text': 'foo'},
            {'id': '2', 'text': 'bar'},
        ]
        assert urlopen.call_count == 3
def test_named_datasource_id_parameter(requests_pub):
    """get_structured_value() queries the remote source with ?<id_parameter>=<id>.

    Successful lookups are cached in request.datasources_cache (urlopen call
    count stays constant on repeat calls); error responses are not cached.
    When the remote service ignores the id parameter and returns several
    items, the matching one is picked out client-side.
    """
    NamedDataSource.wipe()
    datasource = NamedDataSource(name='foobar')
    datasource.data_source = {'type': 'json', 'value': 'http://whatever/'}
    datasource.id_parameter = 'id'
    datasource.store()
    # nominal case: one matching item returned
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        value = [{'id': '1', 'text': 'foo'}]
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': value}))
        assert datasource.get_structured_value('1') == value[0]
        assert urlopen.call_count == 1
        assert urlopen.call_args[0][0] == 'http://whatever/?id=1'
        # try again, get from request.datasources_cache
        assert datasource.get_structured_value('1') == value[0]
        assert urlopen.call_count == 1  # no new call

    # several items returned: the first one is used
    get_request().datasources_cache = {}
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        value = [{'id': '1', 'text': 'bar'}, {'id': '2', 'text': 'foo'}]
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': value}))
        assert datasource.get_structured_value('1') == value[0]
        assert urlopen.call_count == 1

    # empty result: no value
    get_request().datasources_cache = {}
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': []}))  # empty list
        assert datasource.get_structured_value('1') is None
        assert urlopen.call_count == 1

    # explicit err: 0 means success
    get_request().datasources_cache = {}
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        value = [{'id': '1', 'text': 'foo'}]
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': value, 'err': 0}))
        assert datasource.get_structured_value('1') == value[0]
        assert urlopen.call_count == 1

    # err: 1 means error -> no value, and not cached
    get_request().datasources_cache = {}
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        value = [{'id': '1', 'text': 'foo'}]
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': value, 'err': 1}))
        assert datasource.get_structured_value('1') is None
        assert urlopen.call_count == 1
        # no cache for errors
        assert datasource.get_structured_value('1') is None
        assert urlopen.call_count == 2  # called again

    # malformed payloads -> no value
    get_request().datasources_cache = {}
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        value = {'id': '1', 'text': 'foo'}  # not a list
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': value}))
        assert datasource.get_structured_value('1') is None
        assert urlopen.call_count == 1

    get_request().datasources_cache = {}
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        urlopen.side_effect = lambda *args: io.StringIO('not json')
        assert datasource.get_structured_value('1') is None
        assert urlopen.call_count == 1

    # ws badly configured, return all items
    get_request().datasources_cache = {}
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        value = [{'id': '1', 'text': 'bar'}, {'id': '2', 'text': 'foo'}]
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': value}))
        assert datasource.get_structured_value('2') == value[1]
        assert urlopen.call_count == 1
        # try again, get from request.datasources_cache
        assert datasource.get_structured_value('2') == value[1]
        assert urlopen.call_count == 1  # no new call

    get_request().datasources_cache = {}
    with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
        value = [{'id': '1', 'text': 'bar'}, {'id': '2', 'text': 'foo'}]
        urlopen.side_effect = lambda *args: io.StringIO(json.dumps({'data': value}))
        assert datasource.get_structured_value('3') is None
        assert urlopen.call_count == 1
        # try again, get from request.datasources_cache
        assert datasource.get_structured_value('3') is None
        assert urlopen.call_count == 1  # no new call
def test_named_datasource_in_formdef(pub):
    """is_used_in_formdef() matches item fields against the data source slug."""
    NamedDataSource.wipe()
    named_source = NamedDataSource(name='foobar')
    named_source.data_source = {'type': 'json', 'value': 'http://whatever/'}
    named_source.store()
    assert named_source.slug == 'foobar'

    formdef = FormDef()
    # no fields yet: not used anywhere
    assert not named_source.is_used_in_formdef(formdef)
    formdef.fields = [
        fields.ItemField(id='0', label='string', type='item', data_source={'type': 'foobar'}),
    ]
    assert named_source.is_used_in_formdef(formdef)
    # after changing the slug, the field's reference no longer matches
    named_source.slug = 'barfoo'
    assert not named_source.is_used_in_formdef(formdef)
def test_data_source_in_template(pub):
    """Template variables are substituted into the data source URL.

    A formdef/formdata pair is fed into pub.substitutions so that
    {{form_var_foo}} resolves to 'hello'; the JSON data source URL must be
    rendered with that value before the HTTP request is issued.
    """
    NamedDataSource.wipe()
    FormDef.wipe()
    formdef = FormDef()
    formdef.name = 'foobar'
    formdef.workflow_options = {'foo': 'hello'}
    formdef.fields = [
        fields.StringField(id='1', label='Test', type='string', varname='foo'),
    ]
    formdef.store()
    formdata = formdef.data_class()()
    formdata.data = {'1': 'hello'}
    formdata.store()
    # make form_var_foo available to template evaluation
    pub.substitutions.feed(formdef)
    pub.substitutions.feed(formdata)
    data_source = NamedDataSource(name='foobar')
    data_source.data_source = {'type': 'json', 'value': "https://example.invalid/json?t={{form_var_foo}}"}
    data_source.store()
    with pub.complex_data():
        with mock.patch('wcs.qommon.misc.urlopen') as urlopen:
            urlopen.side_effect = lambda *args: io.StringIO(
                '{"data": [{"id": 0, "text": "zéro"}, {"id": 1, "text": "uné"}, {"id": 2, "text": "deux"}]}'
            )
            # the data source is usable from a template expression, and the
            # URL actually requested carries the substituted variable
            assert (
                WorkflowStatusItem.compute('{{ data_source.foobar|first|get:"text" }}', allow_complex=True)
                == 'zéro'
            )
            assert urlopen.call_args[0][0] == 'https://example.invalid/json?t=hello'
def export_to_indented_xml(data_source, include_id=False):
    """Export *data_source* to an ElementTree node, indented for stable comparison."""
    tree = data_source.export_to_xml(include_id=include_id)
    indent(tree)
    return tree
def assert_import_export_works(data_source, include_id=False):
    """Round-trip *data_source* through XML export/import and compare serializations.

    Returns the re-imported data source so callers can run extra checks on it.
    """
    serialized = ET.tostring(data_source.export_to_xml(include_id))
    data_source2 = NamedDataSource.import_from_xml_tree(ET.fromstring(serialized), include_id)
    expected = ET.tostring(export_to_indented_xml(data_source))
    assert ET.tostring(export_to_indented_xml(data_source2)) == expected
    return data_source2
def test_data_source(pub):
    """A bare data source survives an XML export/import round trip."""
    assert_import_export_works(NamedDataSource(name='test'), include_id=True)
def test_data_source_with_category(pub):
    """The category link survives the round trip, and is dropped if the category is gone."""
    category = DataSourceCategory(name='test category')
    category.store()
    named_source = NamedDataSource(name='test category')
    named_source.category_id = category.id
    named_source.store()

    imported = assert_import_export_works(named_source, include_id=True)
    assert imported.category_id == named_source.category_id

    # import with non existing category
    DataSourceCategory.wipe()
    serialized = ET.tostring(named_source.export_to_xml(include_id=True))
    reimported = NamedDataSource.import_from_xml_tree(ET.fromstring(serialized), include_id=True)
    assert reimported.category_id is None
def test_data_source_with_qs_data(pub):
    """Extra query-string parameters (qs_data) survive the XML round trip."""
    named_source = NamedDataSource(name='test')
    named_source.qs_data = {'arg1': 'val1', 'arg2': 'val2'}
    named_source.store()
    imported = assert_import_export_works(named_source, include_id=True)
    assert imported.qs_data == {'arg1': 'val1', 'arg2': 'val2'}