datasource: DataSourceProxy to access other variables (#67288) #394

Merged
lguerin merged 1 commit from wip/67288-data-source-meta into main 2023-06-26 18:13:00 +02:00
3 changed files with 98 additions and 37 deletions
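
With this change, data_source.<slug> in templates no longer resolves directly to the list of structured items but to a DataSourceProxy that still behaves like that list (iteration, indexing, length, comparison) while also exposing the other top-level keys of the JSON response, such as a "meta" block. A minimal sketch of the new template-side behaviour, assuming a JSON data source with slug foobar whose response carries a "meta" object (mirroring the tests below):

    # rendered through the publisher's substitution context
    WorkflowStatusItem.compute('{{ data_source.foobar.meta.foo }}')     # -> 'bar'
    WorkflowStatusItem.compute('{{ data_source.foobar.meta.blah.c }}')  # -> 'd'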


@@ -635,14 +635,17 @@ def test_data_sources_view(pub):
     CardDef.wipe()


-def test_data_sources_view_with_exception_in_preview(pub):
+@mock.patch('wcs.data_sources._get_structured_items')
+def test_data_sources_view_with_exception_in_preview(mock_get_structured_items, pub):
+    # all inner exceptions should be caught and displayed as empty result
+    mock_get_structured_items.side_effect = Exception
     create_superuser(pub)
     NamedDataSource.wipe()
     data_source = NamedDataSource(name='foobar')
     app = login(get_app(pub))
-    data_source.data_source = {'type': 'json', 'value': '{{data_source.foobar}}'}
+    data_source.data_source = {'type': 'json', 'value': 'xxx'}
     data_source.store()
     resp = app.get('/backoffice/settings/data-sources/%s/' % data_source.id)
     assert 'Unexpected fatal error getting items for preview' in resp.text
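
The test relies on unittest.mock: setting side_effect to an exception class makes every call to the patched function raise. A minimal standalone sketch of that pattern, with an illustrative stand-in for _get_structured_items:

    from unittest import mock

    def get_items():  # illustrative stand-in for wcs.data_sources._get_structured_items
        return [1, 2, 3]

    with mock.patch('__main__.get_items', side_effect=Exception):
        try:
            get_items()
        except Exception:
            print('caught')  # the preview view catches this and shows the message asserted above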


@@ -1577,7 +1577,11 @@ def test_data_source_in_template(pub):
         rsps.get(
             'https://example.invalid/json',
             json={
-                "data": [{"id": 0, "text": "zéro"}, {"id": 1, "text": "uné"}, {"id": 2, "text": "deux"}]
+                "data": [{"id": 0, "text": "zéro"}, {"id": 1, "text": "uné"}, {"id": 2, "text": "deux"}],
+                "meta": {
+                    "foo": "bar",
+                    "blah": {"a": "b", "c": "d"},
+                },
             },
         )
@@ -1586,6 +1590,9 @@ def test_data_source_in_template(pub):
             == 'zéro'
         )
         assert rsps.calls[-1].request.url == 'https://example.invalid/json?t=hello'
+        assert WorkflowStatusItem.compute('{{ data_source.foobar.meta.foo }}') == 'bar'
+        assert WorkflowStatusItem.compute('{{ data_source.foobar.meta.blah }}') == "{'a': 'b', 'c': 'd'}"
+        assert WorkflowStatusItem.compute('{{ data_source.foobar.meta.blah.c }}') == 'd'


 def export_to_indented_xml(data_source, include_id=False):


@@ -520,39 +520,9 @@ def _get_structured_items(data_source, mode=None, raise_on_error=False):
     # a 'data' key holding the list of items, each of them being a dict
     # with at least both an "id" and a "text" key.
     geojson = data_source.get('type') == 'geojson'
-    url = data_source.get('value')
+    url = get_json_url(data_source)
     if not url:
         return []
-    url = url.strip()
-    if Template.is_template_string(url):
-        vars = get_publisher().substitutions.get_context_variables(mode='lazy')
-        url = get_variadic_url(url, vars)
-    if data_source.get('qs_data'):  # merge qs_data into url
-        from wcs.workflows import WorkflowStatusItem
-
-        parsed = urllib.parse.urlparse(url)
-        qs = list(urllib.parse.parse_qsl(parsed.query))
-        for key, value in data_source['qs_data'].items():
-            try:
-                value = WorkflowStatusItem.compute(value, raises=True, record_errors=False)
-                value = str(value) if value is not None else ''
-            except Exception as e:
-                get_publisher().record_error(
-                    _(
-                        'Failed to compute value "%(value)s" for "%(query)s" query parameter'
-                        % {'value': value, 'query': key}
-                    ),
-                    context='[DATASOURCE]',
-                    exception=e,
-                    notify=data_source.get('notify_on_errors'),
-                    record=data_source.get('record_on_errors'),
-                )
-            else:
-                key = force_str(key)
-                value = force_str(value)
-                qs.append((key, value))
-        qs = urllib.parse.urlencode(qs)
-        url = urllib.parse.urlunparse(parsed[:4] + (qs,) + parsed[5:6])
     request = get_request()
     cache_key = get_cache_key(url, data_source)
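
The comment kept in context above documents the payload contract; an illustrative example of a response body a 'json' data source may return, including the optional extra top-level keys that the proxy below makes reachable from templates:

    # illustrative payload: a mandatory 'data' list of items with at least
    # 'id' and 'text' keys, plus arbitrary extra top-level keys such as 'meta'
    sample_response = {
        'data': [{'id': 1, 'text': 'first'}, {'id': 2, 'text': 'second'}],
        'meta': {'foo': 'bar'},
    }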
@@ -573,6 +543,43 @@ def _get_structured_items(data_source, mode=None, raise_on_error=False):
         return []


+def get_json_url(data_source):
+    url = data_source.get('value')
+    if not url:
+        return None
+    url = url.strip()
+    if Template.is_template_string(url):
+        vars = get_publisher().substitutions.get_context_variables(mode='lazy')
+        url = get_variadic_url(url, vars)
+    if data_source.get('qs_data'):  # merge qs_data into url
+        from wcs.workflows import WorkflowStatusItem
+
+        parsed = urllib.parse.urlparse(url)
+        qs = list(urllib.parse.parse_qsl(parsed.query))
+        for key, value in data_source['qs_data'].items():
+            try:
+                value = WorkflowStatusItem.compute(value, raises=True, record_errors=False)
+                value = str(value) if value is not None else ''
+            except Exception as e:
+                get_publisher().record_error(
+                    _(
+                        'Failed to compute value "%(value)s" for "%(query)s" query parameter'
+                        % {'value': value, 'query': key}
+                    ),
+                    context='[DATASOURCE]',
+                    exception=e,
+                    notify=data_source.get('notify_on_errors'),
+                    record=data_source.get('record_on_errors'),
+                )
+            else:
+                key = force_str(key)
+                value = force_str(value)
+                qs.append((key, value))
+        qs = urllib.parse.urlencode(qs)
+        url = urllib.parse.urlunparse(parsed[:4] + (qs,) + parsed[5:6])
+    return url
+
+
 def get_real(data_source):
     if not data_source:
         return None
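
The qs_data branch of the extracted get_json_url computes each parameter, then splices it back into the URL's query string. A minimal sketch of that splicing using only the standard library (the wcs-specific template evaluation and error recording are elided):

    import urllib.parse

    def merge_qs_data(url, qs_data):
        # append extra query-string parameters to an existing URL,
        # mirroring the qs_data branch of get_json_url above
        parsed = urllib.parse.urlparse(url)
        qs = list(urllib.parse.parse_qsl(parsed.query)) + list(qs_data.items())
        return urllib.parse.urlunparse(parsed._replace(query=urllib.parse.urlencode(qs)))

    print(merge_qs_data('https://example.invalid/json?a=1', {'t': 'hello'}))
    # https://example.invalid/json?a=1&t=hello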
@@ -1112,14 +1119,58 @@ class StubNamedDataSource(NamedDataSource):
 class DataSourcesSubstitutionProxy:
     def __getattr__(self, attr):
-        return get_structured_items(
-            NamedDataSource.get_by_slug(attr, stub_fallback=True).extended_data_source
-        )
+        return DataSourceProxy(attr)

     def inspect_keys(self):
         return []


+class DataSourceProxy:
+    def __init__(self, name):
+        self.name = name
+        self.data_source = NamedDataSource.get_by_slug(self.name, stub_fallback=True)
+        self._list = get_structured_items(self.data_source.extended_data_source)
+        self._data = Ellipsis
+
+    def get_value(self):
+        return self._list
+
+    def __len__(self):
+        return len(self._list)
+
+    def __str__(self):
+        return str(self._list)
+
+    def __repr__(self):
+        return '<DataSourceProxy, %s>' % self.name
+
+    def __iter__(self):
+        yield from self._list
+
+    def __nonzero__(self):
+        return any(self)
+
+    def __contains__(self, value):
+        return value in list(self)
+
+    def __eq__(self, other):
+        return list(self) == list(other)
+
+    def __getitem__(self, key):
+        return list(self)[key]
+
+    def __getattr__(self, attr):
+        data_source = self.data_source.extended_data_source
+        if data_source.get('type') not in ['json', 'geojson']:
+            raise AttributeError
+        if self._data is Ellipsis:
+            url = get_json_url(data_source)
+            self._data = get_json_from_url(url, data_source)
+        if self._data is None:
+            raise AttributeError
+        return self._data[attr]
+
+
 def has_chrono(publisher):
     return publisher.get_site_option('chrono_url') is not None
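
DataSourceProxy is lazy on purpose: iterating or indexing only touches the pre-computed item list, while any other attribute triggers a single fetch of the raw JSON (cached in _data, with Ellipsis marking "not fetched yet") and a lookup among its top-level keys. A standalone sketch of the same pattern, independent of wcs (all names are illustrative):

    class LazyJsonProxy:
        def __init__(self, items, fetch):
            self._list = items      # pre-computed structured items (the 'data' entries)
            self._fetch = fetch     # callable returning the full JSON payload
            self._data = Ellipsis   # Ellipsis means "not fetched yet"

        def __iter__(self):
            yield from self._list

        def __getitem__(self, key):
            return self._list[key]

        def __getattr__(self, attr):
            # only called for names not found normally, e.g. 'meta'
            if self._data is Ellipsis:
                self._data = self._fetch()  # fetched once, then cached
            if self._data is None:
                raise AttributeError(attr)
            return self._data[attr]

    payload = {'data': [{'id': 1, 'text': 'one'}], 'meta': {'foo': 'bar'}}
    proxy = LazyJsonProxy(payload['data'], lambda: payload)
    print(proxy[0]['text'])   # 'one' -- served from the item list, no fetch
    print(proxy.meta['foo'])  # 'bar' -- triggers the single fetch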