2012-05-24 09:13:27 +02:00
|
|
|
# w.c.s. - web application for online forms
|
|
|
|
# Copyright (C) 2005-2012 Entr'ouvert
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2014-01-19 17:46:03 +01:00
|
|
|
import collections
|
2022-04-01 18:59:19 +02:00
|
|
|
import collections.abc
|
2018-09-22 11:24:43 +02:00
|
|
|
import hashlib
|
2022-12-16 11:43:11 +01:00
|
|
|
import json
|
2021-02-28 16:19:33 +01:00
|
|
|
import urllib.parse
|
2023-06-21 17:00:49 +02:00
|
|
|
import xml.etree.ElementTree as ET
|
2013-03-13 14:53:35 +01:00
|
|
|
|
2022-12-09 16:24:00 +01:00
|
|
|
from django.core.cache import cache
|
2020-07-28 14:48:00 +02:00
|
|
|
from django.template import TemplateSyntaxError, VariableDoesNotExist
|
2023-02-25 10:11:37 +01:00
|
|
|
from django.utils.encoding import force_bytes, force_str
|
2023-10-07 22:04:16 +02:00
|
|
|
from quixote import get_publisher, get_request, get_response, get_session
|
2022-11-02 14:18:09 +01:00
|
|
|
from quixote.errors import RequestError
|
2014-08-24 18:29:34 +02:00
|
|
|
from quixote.html import TemplateIO
|
2017-03-05 14:26:42 +01:00
|
|
|
|
2019-09-29 20:53:23 +02:00
|
|
|
from .api_utils import sign_url_auto_orig
|
2022-03-11 08:24:50 +01:00
|
|
|
from .categories import DataSourceCategory
|
2023-04-17 10:04:38 +02:00
|
|
|
from .qommon import _, get_logger, misc, pgettext
|
2021-02-16 15:58:46 +01:00
|
|
|
from .qommon.afterjobs import AfterJob
|
2021-02-16 10:27:57 +01:00
|
|
|
from .qommon.cron import CronJob
|
2023-08-13 17:50:41 +02:00
|
|
|
from .qommon.form import (
|
|
|
|
CompositeWidget,
|
|
|
|
ComputedExpressionWidget,
|
|
|
|
OptGroup,
|
|
|
|
SingleSelectWidget,
|
|
|
|
StringWidget,
|
|
|
|
ValidationError,
|
|
|
|
)
|
2019-09-29 20:53:23 +02:00
|
|
|
from .qommon.humantime import seconds2humanduration
|
2023-09-14 08:48:37 +02:00
|
|
|
from .qommon.misc import get_variadic_url, unlazy
|
2021-02-16 10:27:57 +01:00
|
|
|
from .qommon.publisher import get_publisher_class
|
2019-09-29 20:53:23 +02:00
|
|
|
from .qommon.storage import StorableObject
|
2023-09-21 16:20:32 +02:00
|
|
|
from .qommon.template import Template, TemplateError
|
2019-09-29 20:53:23 +02:00
|
|
|
from .qommon.xml_storage import XmlStorableObject
|
2012-05-24 09:13:27 +02:00
|
|
|
|
|
|
|
# Registry of extra callables exposed to 'formula' (Python expression) data
# sources; filled via register_data_source_function() and merged into the
# evaluation globals in _get_structured_items().
data_source_functions = {}
2020-01-18 20:33:44 +01:00
|
|
|
|
2024-03-04 17:02:17 +01:00
|
|
|
class NamedDataSourceImportError(Exception):
    """Raised when importing a named data source definition fails."""
|
|
|
|
|
|
|
|
|
2021-11-09 09:52:38 +01:00
|
|
|
class DataSourceError(Exception):
    """Raised when a data source cannot provide its items (e.g. remote
    endpoint unavailable) and the caller asked for errors to be raised."""
|
|
|
|
|
|
|
|
|
2012-05-24 09:13:27 +02:00
|
|
|
def register_data_source_function(function, function_name=None):
    """Expose *function* to Python-expression data sources.

    The function is registered under *function_name* (defaulting to its own
    ``__name__``); an existing registration for that name is kept (first
    registration wins).
    """
    key = function_name or function.__name__
    data_source_functions.setdefault(key, function)
|
|
|
|
|
|
|
|
|
2023-11-21 12:01:41 +01:00
|
|
|
def get_data_source_entry_from_user(user):
    """Build a data source item dict for *user*.

    The entry carries the user's substitution variables (minus the 'user'
    self-reference) plus the 'id'/'text' keys data source items require.
    """
    entry = user.get_substitution_variables(prefix='')
    del entry['user']
    entry.update({'id': user.id, 'text': user.name})
    return entry
|
|
|
|
|
|
|
|
|
2012-05-24 09:13:27 +02:00
|
|
|
class DataSourceSelectionWidget(CompositeWidget):
    """Composite form widget to pick a data source: a 'type' select (cards,
    named sources, generic JSON/JSONP/GeoJSON/expression sources) plus a
    'value' string for the URL/expression of generic types."""

    def __init__(self, name, value=None, allowed_source_types=None, disallowed_source_types=None, **kwargs):
        # value is a dict with 'type' and (for generic types) 'value' keys.
        if allowed_source_types is None:
            allowed_source_types = {'json', 'jsonp', 'geojson', 'named', 'cards', 'python', 'jsonvalue'}
        # site options may forbid deprecated source kinds entirely
        if get_publisher().has_site_option('disable-python-expressions') and 'python' in allowed_source_types:
            allowed_source_types.remove('python')
        if get_publisher().has_site_option('disable-jsonp-sources') and 'jsonp' in allowed_source_types:
            allowed_source_types.remove('jsonp')
        if disallowed_source_types:
            allowed_source_types = allowed_source_types.difference(disallowed_source_types)

        CompositeWidget.__init__(self, name, value, **kwargs)

        if not value:
            value = {}

        options = [(None, _('None'), None)]

        if 'cards' in allowed_source_types:
            from wcs.carddef import CardDef
            from wcs.categories import CardDefCategory

            user = get_request().user
            cards_options = []
            for ds in CardDef.get_carddefs_as_data_source():
                # option: [value, label, key, extra-attrs-dict]
                option = [ds[2], ds[1], ds[2], {'carddef': ds[0]}]
                # link to the custom view when the user may see it,
                # otherwise to the card model admin page
                if ds[3] and (user.is_admin or ds[0].is_of_concern_for_user(user)):
                    option[3].update({'data-goto-url': '%s%s' % (ds[0].get_url(), ds[3].get_url_slug())})
                elif get_publisher().get_backoffice_root().is_accessible('cards'):
                    option[3].update({'data-goto-url': ds[0].get_admin_url()})
                option[3].update({'data-has-image': str(ds[0].has_image_field()).lower()})
                cards_options.append(option)
            cards_options.sort(key=lambda x: misc.simplify(x[1]))
            if cards_options:
                carddef_categories = CardDefCategory.select()
                CardDefCategory.sort_by_position(carddef_categories)
                if carddef_categories:
                    # group card options per category, trailing catch-all group
                    carddef_categories.append(CardDefCategory(pgettext('categories', 'Uncategorised')))
                    for carddef_category in carddef_categories:
                        carddef_category.cards_options = [
                            x for x in cards_options if x[3]['carddef'].category_id == carddef_category.id
                        ]
                        if carddef_category.cards_options:
                            options.append(OptGroup('%s - %s' % (_('Cards'), carddef_category.name)))
                            options.extend(carddef_category.cards_options)
                else:
                    options.append(OptGroup(_('Cards')))
                    options.extend(cards_options)

        if 'named' in allowed_source_types:
            admin_accessible = NamedDataSource.is_admin_accessible()
            nds_options = []
            nds_agenda_options = []
            nds_users_options = []
            for ds in NamedDataSource.select():
                option = [
                    ds.slug,
                    ds.name,
                    ds.slug,
                    {
                        'data-type': ds.type,
                        'data-maybe-datetimes': 'true' if ds.maybe_datetimes() else 'false',
                    },
                ]
                if admin_accessible:
                    option[-1]['data-goto-url'] = ds.get_admin_url()
                if ds.external == 'agenda':
                    nds_agenda_options.append(option)
                elif ds.type == 'wcs:users':
                    nds_users_options.append(option)
                else:
                    # keep the category along to group plain sources below
                    option.append(ds.category)
                    nds_options.append(option)

            nds_agenda_options.sort(key=lambda x: misc.simplify(x[1]))
            if nds_agenda_options:
                options.append(OptGroup(_('Agendas')))
                options.extend(nds_agenda_options)

            nds_users_options.sort(key=lambda x: misc.simplify(x[1]))
            if nds_users_options:
                options.append(OptGroup(_('Users')))
                options.extend(nds_users_options)

            nds_options.sort(key=lambda x: misc.simplify(x[1]))
            if nds_options:
                # NOTE: 'name' here shadows the widget name parameter (no
                # longer needed at this point).
                nds_by_category_names = collections.defaultdict(list)
                for nds in nds_options:
                    name = ''
                    if nds[-1]:
                        name = nds[-1].name
                    nds_by_category_names[name].append(nds[:-1])
                category_names = list(nds_by_category_names.keys())
                if len(category_names) == 1 and category_names[0] == '':
                    # no category found
                    options.append(OptGroup(_('Manually Configured Data Sources')))
                    options.extend(nds_options)
                else:
                    # sort categories
                    category_names = sorted(category_names)
                    # datasources without categories at the end
                    if category_names[0] == '':
                        category_names = category_names[1:] + ['']
                    # group by category name
                    for name in category_names:
                        options.append(OptGroup(name or _('Without category')))
                        options.extend(nds_by_category_names[name])

        generic_options = []
        if 'json' in allowed_source_types:
            generic_options.append(('json', _('JSON URL'), 'json', {'data-maybe-datetimes': 'true'}))
        if 'jsonp' in allowed_source_types:
            generic_options.append(('jsonp', _('JSONP URL'), 'jsonp'))
        elif value.get('type') == 'jsonp':
            # keep a deprecated choice visible so an existing value still shows
            generic_options.append(('jsonp', _('JSONP URL (deprecated)'), 'jsonp'))
        if 'geojson' in allowed_source_types:
            generic_options.append(('geojson', _('GeoJSON URL'), 'geojson'))
        if 'python' in allowed_source_types:
            generic_options.append(('formula', _('Python Expression (deprecated)'), 'python'))
        elif value.get('type') == 'formula':
            generic_options.append(('formula', _('Python Expression (deprecated)'), 'python'))
        if 'jsonvalue' in allowed_source_types:
            generic_options.append(('jsonvalue', _('JSON Expression'), 'jsonvalue'))

        # only label the group when there are other options before it
        if len(options) > 1 and generic_options:
            options.append(OptGroup(_('Generic Data Sources')))
        options.extend(generic_options)

        self.add(
            SingleSelectWidget,
            'type',
            options=options,
            value=value.get('type'),
            attrs={'data-dynamic-display-parent': 'true'},
        )
        if len(options) > 50:
            # long lists get a select2 autocomplete
            widget = self.get_widget('type')
            widget.attrs['data-autocomplete'] = 'true'
            get_response().add_javascript(['select2.js'])

        self.parse()
        if not self.value:
            self.value = {}

        self.add(
            StringWidget,
            'value',
            value=value.get('value'),
            size=80,
            attrs={
                'data-dynamic-display-child-of': 'data_source$type',
                'data-dynamic-display-value-in': 'json|jsonp|geojson|python|jsonvalue',
            },
        )

        self._parsed = False

    def _parse(self, request):
        """Assemble self.value as a {'type': ..., 'value': ...} dict,
        validating URLs of the URL-based source types."""
        values = {}
        for name in ('type', 'value'):
            value = self.get(name)
            if value:
                values[name] = value

        if values.get('type') in ('json', 'jsonp', 'geojson'):
            url = values.get('value') or ''
            if url:
                if Template.is_template_string(url):
                    # templated URL: only check template syntax
                    try:
                        ComputedExpressionWidget.validate_template(url)
                    except ValidationError as e:
                        self.error = str(e)
                else:
                    # literal URL: require scheme and host
                    parsed = urllib.parse.urlparse(url)
                    if not (parsed.scheme and parsed.netloc):
                        self.error = _('Value must be a full URL.')

        if values.get('type', '') in ('none', ''):
            values = None
        self.value = values or None

    def render_content(self):
        """Render all sub-widgets inline, without per-widget chrome."""
        r = TemplateIO(html=True)
        for widget in self.get_widgets():
            r += widget.render_content()
        return r.getvalue()
|
|
|
|
|
2016-03-25 19:46:49 +01:00
|
|
|
|
2023-02-14 16:38:04 +01:00
|
|
|
def get_cache_key(url, data_source):
    """Return a stable cache key (md5 hex digest) for *url* combined with the
    data source attributes that change how its payload is interpreted."""
    parts = [url]
    for attribute in (
        'data_attribute',
        'id_attribute',
        'text_attribute',
        'id_property',
        'label_template_property',
    ):
        # note: a missing attribute on a real data source yields 'None',
        # while no data source at all yields '' (kept from historical keys)
        parts.append(str(data_source.get(attribute)) if data_source else '')
    return force_str(hashlib.md5(force_bytes(':'.join(parts))).hexdigest())
|
2023-02-14 16:38:04 +01:00
|
|
|
|
|
|
|
|
2023-05-16 15:41:39 +02:00
|
|
|
def get_tupled_items(structured_items):
    """Turn item dicts into (id, text, key, item) tuples.

    id/text/key are stringified; 'key' falls back to the item id when the
    item has no explicit 'key'. The original dict is kept as last element.
    """
    return [
        (str(item['id']), str(item['text']), str(item.get('key', item['id'])), item)
        for item in structured_items
    ]
|
|
|
|
|
2016-03-25 19:46:49 +01:00
|
|
|
|
2023-05-16 15:41:39 +02:00
|
|
|
def get_items(data_source, include_disabled=False, mode=None):
    """Return data source entries as (id, text, key, item) tuples."""
    structured = get_structured_items(data_source, mode=mode, include_disabled=include_disabled)
    return get_tupled_items(structured)
|
|
|
|
|
|
|
|
|
|
|
|
def get_carddef_items(data_source):
    """Return card-backed entries (including file URLs) as tuples."""
    return get_tupled_items(get_structured_carddef_items(data_source, with_files_urls=True))
|
|
|
|
|
|
|
|
|
2022-06-28 13:41:18 +02:00
|
|
|
def get_id_by_option_text(data_source, text_value):
    """Return the raw option id whose display text equals *text_value*.

    When the source is a JSON source with a query parameter, the remote
    endpoint is queried with the text; otherwise (and as a fallback) all
    enabled options are scanned. Returns None when nothing matches.
    """
    data_source = get_object(data_source)
    if not data_source:
        return None
    text_value = str(text_value)
    if data_source.data_source.get('type') == 'json' and data_source.query_parameter:
        query_url = data_source.get_json_query_url() + urllib.parse.quote(text_value)
        items = request_json_items(query_url, data_source.extended_data_source)
    else:
        items = get_structured_items(data_source.extended_data_source, include_disabled=False)

    # fallback to iterating on all options
    for option in items or []:
        # get raw value from display value
        if option['text'] == text_value:
            return str(option['id'])
|
|
|
|
|
|
|
|
|
2022-12-09 16:24:00 +01:00
|
|
|
def get_json_from_url(
    url, data_source=None, log_message_part='JSON data source', raise_request_error=False, cache_duration=0
):
    """Fetch and validate the JSON document at *url*.

    Returns the parsed dict, or None on error (after recording the error if
    *data_source* is given, or raising RequestError if *raise_request_error*).
    With a non-zero *cache_duration* the parsed payload is cached in the
    Django cache, keyed on the URL plus the source's parsing attributes.
    """
    if cache_duration:
        cache_key = 'data-source-cache-%s' % get_cache_key(url, data_source)
        entries = cache.get(cache_key)
        if entries is not None:
            return entries

    # sign the URL if it targets a known service (api_utils)
    url = sign_url_auto_orig(url)
    data_source = data_source or {}
    data_key = data_source.get('data_attribute') or 'data'
    geojson = data_source.get('type') == 'geojson'
    error_summary = None

    try:
        entries = json.loads(misc.urlopen(url).read())
        if not isinstance(entries, dict):
            raise ValueError('not a json dict')
        # application-level error marker ("err" != 0)
        if entries.get('err') not in (None, 0, '0'):
            details = []
            for key in ['err_desc', 'err_class']:
                if entries.get(key):
                    details.append('%s %s' % (key, entries[key]))
            # include the raw err code unless it is a plain "1" with details
            if not details or entries['err'] not in [1, '1']:
                details.append('err %s' % entries['err'])
            raise ValueError(', '.join(details))
        if geojson:
            if not isinstance(entries.get('features'), list):
                raise ValueError('bad geojson format')
        else:
            # data_key can be "data.foo.bar.results"
            keys = data_key.split('.')
            data = entries
            for key in keys[:-1]:
                if not isinstance(data.get(key), dict):
                    raise ValueError('not a json dict with a %s list attribute' % data_key)
                data = data[key]
            if not isinstance(data.get(keys[-1]), list):
                raise ValueError('not a json dict with a %s list attribute' % data_key)
        if cache_duration:
            cache.set(cache_key, entries, cache_duration)
        return entries
    except misc.ConnectionError as e:
        error_summary = 'Error loading %s (%s)' % (log_message_part, str(e))
    except (ValueError, TypeError) as e:
        error_summary = 'Error reading %s output (%s)' % (log_message_part, str(e))

    # only record errors when the caller provided data source settings
    # (carrying the notify/record flags)
    if data_source:
        get_publisher().record_error(
            error_summary,
            context='[DATASOURCE]',
            notify=data_source.get('notify_on_errors'),
            record=data_source.get('record_on_errors'),
        )

    if raise_request_error:
        raise RequestError('Error retrieving data (%s)' % error_summary)
    return None
|
2020-11-09 09:41:01 +01:00
|
|
|
|
|
|
|
|
2022-12-09 16:24:00 +01:00
|
|
|
def request_json_items(url, data_source, cache_duration=0):
    """Fetch a JSON data source and normalise its entries.

    Each returned entry is a dict carrying at least 'id' and 'text' keys
    (copied in place from the configured id/text attributes); entries that
    are not dicts, lack an id, or have a non-string text are dropped.
    Returns None when the document could not be retrieved or validated.
    """
    payload = get_json_from_url(url, data_source, cache_duration=cache_duration)
    if payload is None:
        return None
    id_key = data_source.get('id_attribute') or 'id'
    text_key = data_source.get('text_attribute') or 'text'
    # the data attribute may be dotted ("data.foo.bar.results")
    for part in (data_source.get('data_attribute') or 'data').split('.'):
        payload = payload[part]
    normalised = []
    for entry in payload:
        # skip malformed items
        if not isinstance(entry, dict):
            continue
        identifier = entry.get(id_key)
        if identifier is None or identifier == '':
            continue
        entry['id'] = identifier
        entry['text'] = entry[text_key] if text_key in entry else str(identifier)
        if isinstance(entry['text'], str):
            normalised.append(entry)
    return normalised
|
|
|
|
|
|
|
|
|
2022-12-09 16:24:00 +01:00
|
|
|
def request_geojson_items(url, data_source, cache_duration=0):
    """Fetch a GeoJSON data source and normalise its features.

    Each kept feature gets 'id' (own id, or the configured id property) and
    'text' (rendered from the label template, falling back to the id) keys;
    features without a usable id are dropped. Returns None on fetch error.
    """
    collection = get_json_from_url(url, data_source, cache_duration=cache_duration)
    if collection is None:
        return None
    feature_items = []
    id_property = data_source.get('id_property') or 'id'
    for feature in collection.get('features'):
        if id_property == 'id' and 'id' in feature:
            # If a Feature has a commonly used identifier, that identifier
            # SHOULD be included as a member of the Feature object with the
            # name "id", and the value of this member is either a JSON string
            # or number.
            # -- https://tools.ietf.org/html/rfc7946#section-3.2
            pass
        elif feature.get('properties', {}).get(id_property):
            feature['id'] = feature['properties'][id_property]
        else:
            # missing id property, skip entry
            continue
        try:
            feature['text'] = Template(data_source.get('label_template_property') or '{{ text }}').render(
                feature['properties']
            )
        except (TemplateSyntaxError, VariableDoesNotExist):
            pass
        if not feature.get('text'):
            feature['text'] = feature['id']
        feature_items.append(feature)
    return feature_items
|
|
|
|
|
|
|
|
|
2023-07-19 14:37:19 +02:00
|
|
|
def get_structured_items(
    data_source, mode=None, include_disabled=True, raise_on_error=False, with_file_urls=False
):
    """Return the list of item dicts for *data_source*, optionally filtering
    out entries flagged as disabled."""
    items = _get_structured_items(
        data_source, mode=mode, raise_on_error=raise_on_error, with_file_urls=with_file_urls
    )
    if include_disabled:
        return items
    return [item for item in items if not item.get('disabled')]
|
|
|
|
|
|
|
|
|
2023-05-16 15:41:39 +02:00
|
|
|
def get_structured_carddef_items(data_source, with_files_urls=False):
    """Return items for a card-backed source (type 'carddef:...')."""
    from wcs.carddef import CardDef

    return CardDef.get_data_source_items(data_source['type'], with_files_urls=with_files_urls)
|
|
|
|
|
|
|
|
|
2023-07-19 14:37:19 +02:00
|
|
|
def _get_structured_items(data_source, mode=None, raise_on_error=False, with_file_urls=False):
    """Resolve *data_source* to its list of item dicts.

    Dispatches on the source type: cards, users, JSON expression, Python
    expression (deprecated), or remote JSON/GeoJSON URL; other types are
    treated as a named data source slug and resolved first. Invalid
    expression results are recorded as errors and yield an empty list.
    """
    if data_source.get('type') and data_source.get('type').startswith('carddef:'):
        # cards
        return get_structured_carddef_items(data_source, with_files_urls=with_file_urls)

    cache_duration = 0
    if data_source.get('type') not in ('json', 'jsonp', 'geojson', 'formula', 'jsonvalue', 'wcs:users'):
        # named data source: replace data_source by its expanded definition,
        # keeping its cache duration for URL-based fetches below
        named_data_source = NamedDataSource.get_by_slug(data_source['type'], stub_fallback=True)
        if named_data_source.cache_duration:
            cache_duration = int(named_data_source.cache_duration)
        data_source = named_data_source.extended_data_source

    if data_source.get('type') == 'wcs:users':
        users = get_publisher().user_class.get_users_with_roles(
            included_roles=data_source.get('included_roles'),
            excluded_roles=data_source.get('excluded_roles'),
            include_disabled_users=data_source.get('include_disabled_users'),
            order_by='name',
        )

        return [get_data_source_entry_from_user(u) for u in users]

    if data_source.get('type') == 'jsonvalue':
        # a template that must render to a JSON list of {'id': ..., 'text': ...}
        value = data_source.get('value')
        if value is None:
            get_publisher().record_error(
                'JSON data source (%r) gave a non usable result' % data_source.get('value'),
                context='[DATASOURCE]',
                notify=data_source.get('notify_on_errors'),
                record=data_source.get('record_on_errors'),
            )
            return []
        variables = get_publisher().substitutions.get_context_variables(mode=mode)
        try:
            value = Template(value, raises=True).render(context=variables)
        except (TemplateError, TemplateSyntaxError):
            get_publisher().record_error(
                'JSON data source (%r) gave a template syntax error' % data_source.get('value'),
                context='[DATASOURCE]',
                notify=data_source.get('notify_on_errors'),
                record=data_source.get('record_on_errors'),
            )
            return []
        try:
            value = json.loads(value)
        except (json.JSONDecodeError, TypeError):
            get_publisher().record_error(
                'JSON data source (%r) gave a non usable result' % data_source.get('value'),
                context='[DATASOURCE]',
                notify=data_source.get('notify_on_errors'),
                record=data_source.get('record_on_errors'),
            )
            return []
        try:
            # structural validation: a list of dicts, all with non-empty
            # 'id' and truthy 'text'
            if not isinstance(value, list):
                raise ValueError
            for item in value:
                if not isinstance(item, dict):
                    raise ValueError
            if all(str(x.get('id', '')) and x.get('text') for x in value):
                return value
            raise ValueError
        except ValueError:
            get_publisher().record_error(
                'JSON data source (%r) gave a non usable result' % data_source.get('value'),
                context='[DATASOURCE]',
                notify=data_source.get('notify_on_errors'),
                record=data_source.get('record_on_errors'),
            )
            return []
    if data_source.get('type') == 'formula':
        # the result of a python expression, it must be a list.
        # - of strings
        # - of dictionaries, in which case it has to have both a "id" and a
        #   "text" keys
        # - of lists or tuples, in which case it may have up to three elements:
        #   - three elements, (id, text, key)
        #   - two elements, (id, text)
        #   - a single element, (id,)
        variables = get_publisher().substitutions.get_context_variables(mode=mode)
        global_eval_dict = get_publisher().get_global_eval_dict()
        global_eval_dict.update(data_source_functions)
        try:
            value = misc.eval_python(data_source.get('value'), global_eval_dict, variables)
            if not isinstance(value, collections.abc.Iterable):
                get_publisher().record_error(
                    'Python data source (%r) gave a non-iterable result' % data_source.get('value'),
                    context='[DATASOURCE]',
                    notify=data_source.get('notify_on_errors'),
                    record=data_source.get('record_on_errors'),
                )
                return []
            if len(value) == 0:
                return []
            if isinstance(value[0], (list, tuple)):
                if len(value[0]) >= 3:
                    return [{'id': x[0], 'text': x[1], 'key': x[2]} for x in value]
                elif len(value[0]) == 2:
                    return [{'id': x[0], 'text': x[1]} for x in value]
                elif len(value[0]) == 1:
                    return [{'id': x[0], 'text': x[0]} for x in value]
                return value
            elif isinstance(unlazy(value[0]), str):
                return [{'id': x, 'text': x} for x in value]
            elif isinstance(value[0], dict):
                if all(str(x.get('id', '')) and x.get('text') for x in value):
                    return value
                get_publisher().record_error(
                    'Python data source (%r) gave a non usable result' % data_source.get('value'),
                    context='[DATASOURCE]',
                    notify=data_source.get('notify_on_errors'),
                    record=data_source.get('record_on_errors'),
                )
                return []
        except Exception as exc:
            get_publisher().record_error(
                'Failed to eval() Python data source (%r)' % data_source.get('value'),
                exception=exc,
                context='[DATASOURCE]',
                notify=data_source.get('notify_on_errors'),
                record=data_source.get('record_on_errors'),
            )
            return []
    elif data_source.get('type') in ['json', 'geojson']:
        # the content available at a json URL, it must answer with a dict with
        # a 'data' key holding the list of items, each of them being a dict
        # with at least both an "id" and a "text" key.
        geojson = data_source.get('type') == 'geojson'
        url = get_json_url(data_source)
        if not url:
            return []

        # per-request memoisation, on top of the optional Django cache used
        # by request_*_items()
        request = get_request()
        cache_key = get_cache_key(url, data_source)
        if hasattr(request, 'datasources_cache') and cache_key in request.datasources_cache:
            return request.datasources_cache[cache_key]

        if geojson:
            items = request_geojson_items(url, data_source, cache_duration=cache_duration)
        else:
            items = request_json_items(url, data_source, cache_duration=cache_duration)
        if items is None:
            if raise_on_error:
                raise DataSourceError('datasource %s is unavailable' % url)
            return []
        if hasattr(request, 'datasources_cache'):
            request.datasources_cache[cache_key] = items
        return items
    return []
|
2015-06-05 16:08:04 +02:00
|
|
|
|
|
|
|
|
2022-10-13 18:21:57 +02:00
|
|
|
def get_json_url(data_source):
    """Build the effective URL for a JSON/GeoJSON data source.

    Renders a templated URL against the lazy substitution context, then
    merges computed 'qs_data' entries into the query string; parameters whose
    computation fails are recorded as errors and left out. Returns None when
    the source has no URL configured.
    """
    url = data_source.get('value')
    if not url:
        return None
    url = url.strip()
    if Template.is_template_string(url):
        vars = get_publisher().substitutions.get_context_variables(mode='lazy')
        url = get_variadic_url(url, vars)
    if data_source.get('qs_data'):  # merge qs_data into url
        from wcs.workflows import WorkflowStatusItem

        parsed = urllib.parse.urlparse(url)
        qs = list(urllib.parse.parse_qsl(parsed.query))
        for key, value in data_source['qs_data'].items():
            try:
                value = WorkflowStatusItem.compute(value, raises=True, record_errors=False)
                # None renders as an empty parameter value
                value = str(value) if value is not None else ''
            except Exception as e:
                get_publisher().record_error(
                    _(
                        'Failed to compute value "%(value)s" for "%(query)s" query parameter'
                        % {'value': value, 'query': key}
                    ),
                    context='[DATASOURCE]',
                    exception=e,
                    notify=data_source.get('notify_on_errors'),
                    record=data_source.get('record_on_errors'),
                )
            else:
                # only appended on success (try/except/else)
                key = force_str(key)
                value = force_str(value)
                qs.append((key, value))
        qs = urllib.parse.urlencode(qs)
        # rebuild the URL with the merged query string, dropping the fragment
        url = urllib.parse.urlunparse(parsed[:4] + (qs,) + parsed[5:6])
    return url
|
|
|
|
|
|
|
|
|
2015-06-05 16:08:04 +02:00
|
|
|
def get_real(data_source):
    """Resolve a data_source dict to its effective definition.

    Inline definitions (json/jsonp/geojson/formula/jsonvalue) and carddef
    references are returned unchanged; any other type is treated as the
    slug of a named data source and resolved through NamedDataSource.
    """
    if not data_source:
        return None
    ds_type = data_source.get('type')
    inline_types = ('json', 'jsonp', 'geojson', 'formula', 'jsonvalue')
    if ds_type in inline_types:
        return data_source
    if ds_type and ds_type.startswith('carddef:'):
        return data_source
    return NamedDataSource.get_by_slug(ds_type, stub_fallback=True).data_source
|
2015-06-05 16:08:04 +02:00
|
|
|
|
|
|
|
|
2020-11-09 09:41:01 +01:00
|
|
|
def get_object(data_source, ignore_errors=True):
    """Return a NamedDataSource object for the given data_source dict.

    Inline and carddef definitions are wrapped in a transient (unsaved)
    NamedDataSource; other types are looked up by slug, falling back to a
    stub object when missing.
    """
    if not data_source:
        return None
    ds_type = data_source.get('type')
    if ds_type is None:
        return None
    is_inline = ds_type in ('json', 'jsonp', 'geojson', 'formula', 'jsonvalue')
    if is_inline or ds_type.startswith('carddef:'):
        wrapper = NamedDataSource()
        wrapper.data_source = data_source
        return wrapper
    return NamedDataSource.get_by_slug(ds_type, ignore_errors=ignore_errors, stub_fallback=True)
|
2019-04-04 14:43:42 +02:00
|
|
|
|
|
|
|
|
2015-06-05 16:08:04 +02:00
|
|
|
class NamedDataSource(XmlStorableObject):
    # A stored, named data source definition (json/jsonp/geojson/carddef/...).
    # storage and identification metadata
    _names = 'datasources'
    _indexes = ['slug']
    xml_root_node = 'datasource'
    backoffice_class = 'wcs.admin.data_sources.NamedDataSourcePage'
    verbose_name = _('Data source')
    verbose_name_plural = _('Data sources')

    name = None
    slug = None
    documentation = None
    # dict with at least 'type' and 'value' keys
    data_source = None
    # cache duration in seconds, stored as a string
    cache_duration = None
    query_parameter = None
    id_parameter = None
    # json-type specific attributes
    data_attribute = None
    id_attribute = None
    text_attribute = None
    # geojson-type specific attributes
    id_property = None
    # extra query-string parameters (mapping, values may be templates)
    qs_data = None
    label_template_property = None
    # set when the data source was built from an external service (agenda)
    external = None
    external_status = None
    notify_on_errors = False
    record_on_errors = False
    # wcs:users-type specific attributes
    users_included_roles = None
    users_excluded_roles = None
    category_id = None
    include_disabled_users = False

    SLUG_DASH = '_'

    # declarations for serialization
    XML_NODES = [
        ('name', 'str'),
        ('slug', 'str'),
        ('description', 'str'),  # legacy
        ('documentation', 'str'),
        ('cache_duration', 'str'),
        ('query_parameter', 'str'),
        ('id_parameter', 'str'),
        ('data_attribute', 'str'),
        ('id_attribute', 'str'),
        ('text_attribute', 'str'),
        ('id_property', 'str'),
        ('qs_data', 'qs_data'),
        ('label_template_property', 'str'),
        ('external', 'str'),
        ('external_status', 'str'),
        ('data_source', 'data_source'),
        ('notify_on_errors', 'bool'),
        ('record_on_errors', 'bool'),
        ('users_included_roles', 'ds_roles'),
        ('users_excluded_roles', 'ds_roles'),
        ('include_disabled_users', 'bool'),
    ]
|
2015-06-05 16:08:04 +02:00
|
|
|
|
|
|
|
    def __init__(self, name=None):
        """Create a data source with an optional display name."""
        StorableObject.__init__(self)
        self.name = name
|
|
|
|
|
2023-05-30 13:52:24 +02:00
|
|
|
    def migrate(self):
        """Apply in-place data migrations; store a snapshot when anything changed."""
        changed = False

        # 2023-05-30: rewrite concrete chrono URLs into their template
        # variable form for agenda data sources
        publisher = get_publisher()
        if self.agenda_ds and has_chrono(publisher):
            url = (self.data_source or {}).get('value')
            if url and not url.startswith('{{'):
                self.data_source['value'] = translate_url(publisher, url)
                changed = True

        if getattr(self, 'description', None):  # 2024-04-07: description -> documentation
            self.documentation = getattr(self, 'description')
            self.description = None
            changed = True

        if changed:
            self.store(comment=_('Automatic update'), snapshot_store_user=False)
|
|
|
|
|
2022-03-11 08:24:50 +01:00
|
|
|
@property
|
|
|
|
def category(self):
|
|
|
|
return DataSourceCategory.get(self.category_id, ignore_errors=True)
|
|
|
|
|
|
|
|
@category.setter
|
|
|
|
def category(self, category):
|
|
|
|
if category:
|
|
|
|
self.category_id = category.id
|
|
|
|
elif self.category_id:
|
|
|
|
self.category_id = None
|
|
|
|
|
2019-04-04 14:43:42 +02:00
|
|
|
@property
|
|
|
|
def type(self):
|
2021-04-13 14:40:14 +02:00
|
|
|
if not self.data_source:
|
|
|
|
return None
|
2019-04-04 14:43:42 +02:00
|
|
|
return self.data_source.get('type')
|
|
|
|
|
2020-07-28 14:48:00 +02:00
|
|
|
    @property
    def extended_data_source(self):
        """Return the data_source dict augmented with type-specific settings.

        For geojson/json/wcs:users types a copy of the dict is returned with
        the relevant extra attributes merged in; other types get the raw dict.
        """
        notify_on_errors = self.notify_on_errors
        record_on_errors = self.record_on_errors
        if getattr(get_request(), 'disable_error_notifications', None) is True:
            # error notifications are muted for this request
            notify_on_errors = False
            record_on_errors = False
        if self.type == 'geojson':
            data_source = self.data_source.copy()
            data_source.update(
                {
                    'id_property': self.id_property,
                    'label_template_property': self.label_template_property,
                    'notify_on_errors': notify_on_errors,
                    'record_on_errors': record_on_errors,
                }
            )
            return data_source
        if self.type == 'json':
            data_source = self.data_source.copy()
            data_source.update(
                {
                    'data_attribute': self.data_attribute,
                    'id_attribute': self.id_attribute,
                    'text_attribute': self.text_attribute,
                    'qs_data': self.qs_data,
                    'notify_on_errors': notify_on_errors,
                    'record_on_errors': record_on_errors,
                }
            )
            return data_source
        if self.type == 'wcs:users':
            data_source = self.data_source.copy()
            data_source.update(
                {
                    'included_roles': self.users_included_roles,
                    'excluded_roles': self.users_excluded_roles,
                    'include_disabled_users': self.include_disabled_users,
                }
            )
            return data_source
        return self.data_source
|
2020-07-28 14:48:00 +02:00
|
|
|
|
2023-08-12 17:18:20 +02:00
|
|
|
def can_geojson(self):
|
|
|
|
return bool(self.type == 'geojson')
|
|
|
|
|
2019-04-04 14:43:42 +02:00
|
|
|
def can_jsonp(self):
|
|
|
|
if self.type == 'jsonp':
|
|
|
|
return True
|
2019-04-04 15:22:50 +02:00
|
|
|
if self.type == 'json' and self.query_parameter:
|
|
|
|
return True
|
2020-09-29 21:07:22 +02:00
|
|
|
if self.type and self.type.startswith('carddef:'):
|
|
|
|
return True
|
2019-04-04 14:43:42 +02:00
|
|
|
return False
|
|
|
|
|
2021-01-11 13:57:32 +01:00
|
|
|
    def maybe_datetimes(self):
        """Heuristic: may this json source serve agenda datetimes?"""
        if self.type == 'json':
            if 'datetimes' in (self.data_source.get('value') or ''):
                return True
            if not self.id and Template.is_template_string(self.data_source.get('value') or ''):
                # unsaved datasource is used when checking display mode; we allow any template
                # in that case.
                return True
        return False
|
2021-01-11 13:57:32 +01:00
|
|
|
|
2023-08-11 10:58:32 +02:00
|
|
|
    def can_images(self):
        """Return True when entries may carry images (carddef with an image field)."""
        if self.type and self.type.startswith('carddef:'):
            from wcs.carddef import CardDef

            carddef = CardDef.get_by_slug(self.type.split(':')[1])
            return carddef.has_image_field()
        return False
|
|
|
|
|
2022-04-08 16:04:25 +02:00
|
|
|
    @property
    def agenda_ds(self):
        # True for data sources built from (or manually copied from) a chrono agenda
        return self.external in ['agenda', 'agenda_manual']

    @property
    def agenda_ds_origin(self):
        """For a manual agenda copy, return the original auto-built data source.

        Matching is done on the data source URL; returns None when not a
        manual copy or when no original is found.
        """
        if self.external != 'agenda_manual':
            return
        for datasource in NamedDataSource.select():
            if datasource.external != 'agenda':
                continue
            if datasource.data_source.get('value') == self.data_source.get('value'):
                return datasource
|
|
|
|
|
2023-08-03 10:44:35 +02:00
|
|
|
    def store(self, comment=None, snapshot_store_user=True, application=None, *args, **kwargs):
        """Persist the data source, assigning a slug if needed and snapshotting."""
        assert not self.is_readonly()
        if self.slug is None:
            # set slug if it's not yet there
            self.slug = self.get_new_slug()
        super().store(*args, **kwargs)
        if get_publisher().snapshot_class:
            get_publisher().snapshot_class.snap(
                instance=self, comment=comment, store_user=snapshot_store_user, application=application
            )
|
2015-06-05 16:08:04 +02:00
|
|
|
|
2022-05-18 22:00:01 +02:00
|
|
|
@classmethod
|
|
|
|
def is_admin_accessible(cls):
|
|
|
|
for section in ('settings', 'forms', 'workflows'):
|
|
|
|
if get_publisher().get_backoffice_root().is_accessible(section):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2020-08-11 15:15:47 +02:00
|
|
|
def get_admin_url(self):
|
|
|
|
base_url = get_publisher().get_backoffice_url()
|
2022-03-28 21:23:25 +02:00
|
|
|
if get_request():
|
|
|
|
for section in ('settings', 'forms', 'workflows'):
|
2022-10-24 16:17:34 +02:00
|
|
|
bo_root = get_publisher().get_backoffice_root()
|
|
|
|
if bo_root and bo_root.is_accessible(section):
|
2022-03-28 21:23:25 +02:00
|
|
|
return '%s/%s/data-sources/%s/' % (base_url, section, self.id)
|
2020-08-11 15:15:47 +02:00
|
|
|
# fallback to settings section
|
|
|
|
section = 'settings'
|
|
|
|
return '%s/%s/data-sources/%s/' % (base_url, section, self.id)
|
|
|
|
|
2024-01-13 16:08:33 +01:00
|
|
|
    def export_data_source_to_xml(self, element, attribute_name, **kwargs):
        """Serialize the data_source dict as <type> and <value> sub-elements."""
        data_source = getattr(self, attribute_name)
        ET.SubElement(element, 'type').text = data_source.get('type')
        ET.SubElement(element, 'value').text = data_source.get('value') or ''
|
2015-06-05 16:08:04 +02:00
|
|
|
|
2021-04-17 13:08:16 +02:00
|
|
|
def import_data_source_from_xml(self, element, **kwargs):
|
2015-06-05 16:08:04 +02:00
|
|
|
return {
|
2019-11-14 10:38:00 +01:00
|
|
|
'type': force_str(element.find('type').text),
|
|
|
|
'value': force_str(element.find('value').text or ''),
|
2015-06-05 16:08:04 +02:00
|
|
|
}
|
|
|
|
|
2022-04-08 16:04:25 +02:00
|
|
|
def export_qs_data_to_xml(self, element, attribute_name, *args, **kwargs):
|
|
|
|
if not self.qs_data:
|
|
|
|
return
|
2023-03-15 16:12:35 +01:00
|
|
|
for key, value in self.qs_data.items():
|
2022-04-08 16:04:25 +02:00
|
|
|
item = ET.SubElement(element, 'item')
|
|
|
|
if isinstance(key, str):
|
2023-02-25 10:11:37 +01:00
|
|
|
ET.SubElement(item, 'name').text = force_str(key)
|
2022-04-08 16:04:25 +02:00
|
|
|
else:
|
|
|
|
raise AssertionError('unknown type for key (%r)' % key)
|
|
|
|
if isinstance(value, str):
|
2023-02-25 10:11:37 +01:00
|
|
|
ET.SubElement(item, 'value').text = force_str(value)
|
2022-04-08 16:04:25 +02:00
|
|
|
else:
|
|
|
|
raise AssertionError('unknown type for value (%r)' % key)
|
|
|
|
|
|
|
|
def import_qs_data_from_xml(self, element, **kwargs):
|
|
|
|
if element is None:
|
|
|
|
return
|
|
|
|
qs_data = {}
|
|
|
|
for item in element.findall('item'):
|
|
|
|
key = force_str(item.find('name').text)
|
|
|
|
value = force_str(item.find('value').text or '')
|
|
|
|
qs_data[key] = value
|
|
|
|
return qs_data
|
|
|
|
|
2022-03-11 08:24:50 +01:00
|
|
|
    def get_dependencies(self):
        # the category is the only external object referenced by a data source
        yield self.category

    def export_to_xml(self, include_id=False):
        """Serialize to XML, appending the category reference."""
        root = super().export_to_xml(include_id=include_id)
        DataSourceCategory.object_category_xml_export(self, root, include_id=include_id)
        return root
|
|
|
|
|
|
|
|
    @classmethod
    def import_from_xml_tree(cls, tree, include_id=False, check_deprecated=False, **kwargs):
        """Deserialize a data source from an XML tree.

        Restores the category reference and, when check_deprecated is set,
        raises NamedDataSourceImportError if deprecated elements are detected.
        """
        from wcs.backoffice.deprecations import DeprecatedElementsDetected, DeprecationsScan

        data_source = super().import_from_xml_tree(
            tree, include_id=include_id, check_deprecated=check_deprecated, **kwargs
        )
        DataSourceCategory.object_category_xml_import(data_source, tree, include_id=include_id)

        if check_deprecated:
            # check for deprecated elements
            job = DeprecationsScan()
            try:
                job.check_deprecated_elements_in_object(data_source)
            except DeprecatedElementsDetected as e:
                raise NamedDataSourceImportError(str(e))

        return data_source
|
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2023-04-25 13:39:50 +02:00
|
|
|
def get_by_slug(cls, slug, ignore_errors=True, stub_fallback=False):
|
2021-12-31 14:58:15 +01:00
|
|
|
data_source = super().get_by_slug(slug, ignore_errors=ignore_errors)
|
|
|
|
if data_source is None:
|
2023-04-25 13:39:50 +02:00
|
|
|
if stub_fallback:
|
2024-04-03 11:28:39 +02:00
|
|
|
get_logger().warning("data source '%s' does not exist" % slug)
|
2023-04-25 13:39:50 +02:00
|
|
|
return StubNamedDataSource(name=slug)
|
2021-12-31 14:58:15 +01:00
|
|
|
return data_source
|
2015-07-10 19:30:12 +02:00
|
|
|
|
2019-04-04 15:22:50 +02:00
|
|
|
def get_json_query_url(self):
|
2022-04-08 16:04:25 +02:00
|
|
|
url = self.get_variadic_url()
|
2020-03-03 14:18:58 +01:00
|
|
|
if not url:
|
|
|
|
return ''
|
2021-03-22 11:14:42 +01:00
|
|
|
if '?' not in url:
|
2019-04-04 15:22:50 +02:00
|
|
|
url += '?' + self.query_parameter + '='
|
|
|
|
else:
|
|
|
|
url += '&' + self.query_parameter + '='
|
|
|
|
return url
|
|
|
|
|
2023-01-31 16:03:56 +01:00
|
|
|
    def get_jsonp_url(self, **kwargs):
        """Return an URL usable for autocompletion, or None.

        jsonp sources expose their raw URL; json-with-query-parameter and
        carddef sources go through a session token and the /api/autocomplete/
        endpoint.
        """
        if self.type == 'jsonp':
            return self.data_source.get('value')

        token_context = {}
        if self.type == 'json' and self.query_parameter:
            json_url = self.get_json_query_url()
            token_context = {'url': json_url, 'data_source': self.id}

        elif self.type and self.type.startswith('carddef:'):
            token_context = {'carddef_ref': self.type, **kwargs}

            parts = self.type.split(':')
            if len(parts) > 2:
                # custom view, check if it's dynamic
                from wcs.carddef import CardDef
                from wcs.workflows import WorkflowStatusItem

                custom_view = CardDef.get_data_source_custom_view(self.type)
                if custom_view is None:
                    get_publisher().record_error(
                        _('Unknown custom view "%s" for CardDef "%s"') % (parts[2], parts[1]),
                        context='[DATASOURCE]',
                        notify=True,
                        record=True,
                    )
                else:
                    # render templated filters now so the token carries
                    # concrete values
                    had_template = False
                    for filter_key, filter_value in custom_view.filters.items():
                        if not Template.is_template_string(filter_value):
                            continue
                        custom_view.filters[filter_key] = WorkflowStatusItem.compute(filter_value)
                        had_template = True
                    if had_template:
                        # keep altered custom view in token
                        token_context.update(
                            {
                                'dynamic_custom_view': custom_view.id,
                                'dynamic_custom_view_filters': custom_view.filters,
                            }
                        )

        if token_context:
            token = get_session().create_token('autocomplete', token_context)
            return '/api/autocomplete/%s' % token.id

        return None
|
|
|
|
|
2020-11-09 09:41:01 +01:00
|
|
|
    def get_geojson_url(self):
        """Return the local API URL serving this source's GeoJSON data."""
        assert self.type == 'geojson'
        url = self.data_source.get('value').strip()
        new_url = self.get_variadic_url()
        if new_url != url:
            # templated URL: go through a session token carrying the computed
            # URL — NOTE(review): presumably to avoid exposing it client-side
            token_context = {'url': new_url, 'slug': self.slug}
            token = get_session().create_token('geojson', token_context)
            return '/api/geojson/%s' % token.id
        return '/api/geojson/%s' % self.slug
|
|
|
|
|
|
|
|
    def get_geojson_data(self, force_url=None):
        """Fetch and normalize GeoJSON data for this source.

        Each kept feature gets '_id' (from the configured id property) and
        '_text' (rendered label template, falling back to the id) properties;
        features missing the id property are dropped.  Results are memoized
        on the current request when possible.
        """
        if force_url:
            url = force_url
        else:
            url = self.get_variadic_url()

        request = get_request()
        cache_key = get_cache_key(url, self.extended_data_source)
        if hasattr(request, 'datasources_cache') and cache_key in request.datasources_cache:
            return request.datasources_cache[cache_key]

        cache_duration = 0
        if self.cache_duration:
            # stored as a string of seconds
            cache_duration = int(self.cache_duration)

        data = get_json_from_url(
            url, self.data_source, raise_request_error=True, cache_duration=cache_duration
        )
        id_property = self.id_property or 'id'
        label_template_property = self.label_template_property or '{{ text }}'

        features = []
        for feature in data['features']:
            if id_property not in feature['properties']:
                # missing id property, skip entry
                continue
            feature['properties']['_id'] = feature['properties'][id_property]
            try:
                feature['properties']['_text'] = Template(label_template_property).render(
                    feature['properties']
                )
            except (TemplateSyntaxError, VariableDoesNotExist):
                # bad template: fall through to the id fallback below
                pass
            if not feature['properties'].get('_text'):
                feature['properties']['_text'] = feature['properties']['_id']
            features.append(feature)
        data['features'] = features

        if hasattr(request, 'datasources_cache'):
            request.datasources_cache[cache_key] = data

        return data
|
|
|
|
|
2020-03-31 14:45:41 +02:00
|
|
|
    def get_value_by_id(self, param_name, param_value):
        """Fetch the item whose param_name equals param_value from the json source.

        The id is passed as a query-string parameter; results are memoized on
        the current request.  Returns None when not found or unavailable.
        """
        url = self.get_variadic_url()

        if param_value is None:
            return None

        param_value = str(param_value)

        if '?' not in url:
            url += '?'
        else:
            url += '&'
        url += param_name + '=' + urllib.parse.quote(param_value)

        def find_item(items, name, value):
            # linear scan with string comparison on both sides
            for item in items:
                if str(item.get(name)) == str(value):
                    return item
            # not found
            get_publisher().record_error(_('Could not find element by id "%s"') % value)
            return None

        request = get_request()
        if hasattr(request, 'datasources_cache') and url in request.datasources_cache:
            items = request.datasources_cache[url]
            if not items:  # cache may contains empty list from get_structured_items
                return None
            return find_item(items, param_name, param_value)

        items = request_json_items(url, self.extended_data_source)
        if not items:  # None or empty list are not valid
            return None
        if hasattr(request, 'datasources_cache'):
            request.datasources_cache[url] = items
        return find_item(items, param_name, param_value)
|
2019-04-04 15:22:50 +02:00
|
|
|
|
2020-09-30 22:04:20 +02:00
|
|
|
    def get_card_structured_value_by_id(self, option_id):
        """Return the carddef item matching option_id (by id, then by text)."""
        from wcs.carddef import CardDef

        if option_id is None:
            return None

        values = CardDef.get_data_source_items(self.type, get_by_id=option_id)
        if not values:
            # fall back to matching on the display text
            values = CardDef.get_data_source_items(self.type, get_by_text=str(option_id))
        if not values:
            return None
        return values[0]
|
|
|
|
|
2019-04-04 15:22:50 +02:00
|
|
|
def get_display_value(self, option_id):
|
|
|
|
value = self.get_structured_value(option_id)
|
|
|
|
if value:
|
|
|
|
return value.get('text')
|
|
|
|
return None
|
|
|
|
|
|
|
|
    def get_structured_value(self, option_id):
        """Return the structured item matching option_id, dispatching on source type.

        Falls back to matching on item text for generic sources; returns None
        when nothing matches.
        """
        value = None
        if self.type and self.type.startswith('carddef:'):
            value = self.get_card_structured_value_by_id(option_id)
        elif self.type == 'json' and self.id_parameter:
            value = self.get_value_by_id(self.id_parameter, option_id)
        elif self.type == 'wcs:users':
            if isinstance(option_id, get_publisher().user_class):
                # accept a user object and reduce it to its id
                option_id = option_id.id
            value = get_publisher().user_class.get_user_with_roles(
                option_id,
                included_roles=self.users_included_roles,
                excluded_roles=self.users_excluded_roles,
                include_disabled_users=self.include_disabled_users,
                order_by='name',
            )
            if value:
                value = get_data_source_entry_from_user(value)

        else:
            structured_items = get_structured_items(self.extended_data_source, mode='lazy')
            for item in structured_items:
                if str(item['id']) == str(option_id):
                    value = item
                    break
            else:
                # recheck in case option label was given instead of option id.
                for item in structured_items:
                    if str(item['text']) == str(option_id):
                        value = item
                        break
        if value is None:
            return None
        return value
|
|
|
|
|
2015-07-10 19:30:12 +02:00
|
|
|
    @classmethod
    def get_substitution_variables(cls):
        # expose named data sources as {{ data_source.<slug> }} in templates
        return {'data_source': DataSourcesSubstitutionProxy()}
|
|
|
|
|
2019-07-31 12:23:03 +02:00
|
|
|
    def type_label(self):
        """Human-readable label for the data source type (None when unknown)."""
        data_source_labels = {
            'wcs:users': _('Users'),
            'json': _('JSON'),
            'jsonp': _('JSONP'),
            'geojson': _('GeoJSON'),
            'formula': _('Python Expression (deprecated)'),
            'jsonvalue': _('JSON Expression'),
        }
        data_source_type = self.data_source.get('type')
        return data_source_labels.get(data_source_type)

    def humanized_cache_duration(self):
        # cache_duration is stored as a string of seconds
        return seconds2humanduration(int(self.cache_duration))
|
|
|
|
|
2020-08-25 15:53:14 +02:00
|
|
|
    def get_referenced_varnames(self, formdef):
        """Return form variable names referenced by this data source's definition."""
        from .fields import Field

        if self.type in ('json', 'jsonvalue', 'geojson'):
            return Field.get_referenced_varnames(formdef, self.data_source.get('value'))
        # else: carddef
        assert self.type.startswith('carddef:'), 'data source must be carddef'
        from wcs.carddef import CardDef

        return CardDef.get_data_source_referenced_varnames(self.type, formdef=formdef)
|
|
|
|
|
2020-03-09 16:34:12 +01:00
|
|
|
    def get_variadic_url(self):
        """Return the source URL with template variables rendered.

        Note: deliberately shadows, and delegates to, the module-level
        get_variadic_url() helper.
        """
        url = self.data_source.get('value').strip()
        if url and Template.is_template_string(url):
            vars = get_publisher().substitutions.get_context_variables(mode='lazy')
            url = get_variadic_url(url, vars)
        return url
|
|
|
|
|
2021-02-16 10:27:57 +01:00
|
|
|
def is_used(self):
|
|
|
|
from wcs.formdef import get_formdefs_of_all_kinds
|
2021-02-04 10:37:40 +01:00
|
|
|
|
2021-02-16 10:27:57 +01:00
|
|
|
for formdef in get_formdefs_of_all_kinds():
|
2024-04-14 16:58:39 +02:00
|
|
|
if any(self.usage_in_formdef(formdef)):
|
2021-02-16 10:27:57 +01:00
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2024-04-14 16:58:39 +02:00
|
|
|
    def usage_in_formdef(self, formdef):
        """Yield the fields of formdef using this data source.

        Side effect: each yielded field gets its _formdef attribute set to
        the given formdef.
        """
        for field in formdef.fields or []:
            data_source = getattr(field, 'data_source', None)
            if not data_source:
                continue
            if data_source.get('type') == self.slug:
                field._formdef = formdef
                yield field
|
2019-10-10 18:01:34 +02:00
|
|
|
|
2015-07-10 19:30:12 +02:00
|
|
|
|
2019-10-30 19:11:59 +01:00
|
|
|
class StubNamedDataSource(NamedDataSource):
    # Placeholder returned when a slug does not resolve to a real data
    # source; behaves as an empty jsonvalue source and is never persisted.
    type = 'jsonvalue'
    data_source = {'type': 'jsonvalue', 'value': '[]'}
    cache_duration = None

    def __init__(self, name=None):
        # bypass StorableObject initialization on purpose
        self.name = name

    def store(self):
        # stubs are never saved
        pass

    def get_admin_url(self):
        # anchor-only URL, marks the reference as invalid in the UI
        return '#invalid-%s' % self.name

    def __repr__(self):
        return '<StubNamedDataSource %r>' % self.name
|
|
|
|
|
|
|
|
|
2021-03-22 09:19:23 +01:00
|
|
|
class DataSourcesSubstitutionProxy:
    # Lazy attribute-access proxy exposing named data sources to templates
    # ({{ data_source.<slug> }}).
    def __getattr__(self, attr):
        if attr == 'inspect_collapse':
            # flag for the inspect view, keep the entry collapsed
            return True
        return DataSourceProxy(attr)

    def inspect_keys(self):
        # nothing to enumerate eagerly, sources are resolved on access
        return []
|
2021-02-16 10:27:57 +01:00
|
|
|
|
|
|
|
|
2022-10-13 18:21:57 +02:00
|
|
|
class DataSourceProxy:
    # List-like wrapper around a named data source, used from templates;
    # resolves the source and fetches its items at construction time.
    def __init__(self, name):
        self.name = name
        self.data_source = NamedDataSource.get_by_slug(self.name, stub_fallback=True)
        self._list = get_structured_items(self.data_source.extended_data_source)
        # Ellipsis marks "raw JSON payload not fetched yet" (see __getattr__)
        self._data = Ellipsis

    def get_value(self):
        return self._list

    def __len__(self):
        return len(self._list)

    def __str__(self):
        return str(self._list)

    def __repr__(self):
        return '<DataSourceProxy, %s>' % self.name

    def __iter__(self):
        yield from self._list

    def __nonzero__(self):
        # NOTE(review): Python 2 truth protocol name; Python 3 uses __bool__
        return any(self)

    def __contains__(self, value):
        return value in list(self)

    def __eq__(self, other):
        return list(self) == list(other)

    def __getitem__(self, key):
        return list(self)[key]

    def __getattr__(self, attr):
        # expose top-level keys of the raw JSON payload as attributes,
        # for json/geojson sources only
        data_source = self.data_source.extended_data_source
        if data_source.get('type') not in ['json', 'geojson']:
            raise AttributeError
        if self._data is Ellipsis:
            # fetch the payload once, on first attribute access
            url = get_json_url(data_source)
            self._data = get_json_from_url(url, data_source)
        if self._data is None:
            raise AttributeError
        try:
            return self._data[attr]
        except KeyError as e:
            raise AttributeError(attr) from e
|
2022-10-13 18:21:57 +02:00
|
|
|
|
|
|
|
|
2021-02-16 10:27:57 +01:00
|
|
|
def has_chrono(publisher):
    """Tell whether a chrono (agenda) service is configured for this site."""
    chrono_option = publisher.get_site_option('chrono_url')
    return chrono_option is not None
|
|
|
|
|
|
|
|
|
|
|
|
def chrono_url(publisher, url):
    """Join a relative path onto the configured chrono base URL."""
    base = publisher.get_site_option('chrono_url')
    return urllib.parse.urljoin(base, url)
|
2021-02-16 10:27:57 +01:00
|
|
|
|
|
|
|
|
2023-05-30 13:52:24 +02:00
|
|
|
def chrono_variable(publisher):
    """Return the site-options variable name whose value is the chrono URL, or None."""
    target = publisher.get_site_option('chrono_url')
    variables = publisher.get_site_options(section='variables')
    matches = (key for key, value in variables.items() if value == target)
    return next(matches, None)
|
|
|
|
|
|
|
|
|
|
|
|
def translate_url(publisher, url):
    """Replace the concrete chrono base URL in url with its template variable form."""
    variable = chrono_variable(publisher)
    if not variable:
        # no variable maps to the chrono URL, keep the URL as-is
        return url
    base = publisher.get_site_option('chrono_url')
    return url.replace(base, '{{ %s }}' % variable)
|
|
|
|
|
|
|
|
|
2021-02-16 10:27:57 +01:00
|
|
|
def collect_agenda_data(publisher):
    """Query the chrono service and build data source definitions for its agendas.

    Returns a list of dicts (slug/text/url and optional qs_data), or None
    when the service could not be reached.
    """
    agenda_url = chrono_url(publisher, 'api/agenda/')
    result = get_json_from_url(agenda_url, log_message_part='agenda')
    if result is None:
        return

    # build datasources from chrono
    agenda_data = []
    for agenda in result.get('data') or []:
        if agenda['kind'] == 'events':
            agenda_data.append(
                {
                    'slug': 'agenda-%s-%s' % (agenda['kind'], agenda['id']),
                    'text': agenda['text'],
                    'url': agenda['api']['datetimes_url'],
                    'qs_data': {'lock_code': '{{ session_hash_id }}'},
                }
            )
        elif agenda['kind'] in ['meetings', 'virtual']:
            agenda_data.append(
                {
                    'slug': 'agenda-%s-%s-meetingtypes' % (agenda['kind'], agenda['id']),
                    'text': _('%s - Meeting types') % agenda['text'],
                    'url': agenda['api']['meetings_url'],
                }
            )
            # dynamic slots source, parameterized by the selected meeting type
            agenda_data.append(
                {
                    'slug': 'agenda-%s-%s-mtdynamic' % (agenda['kind'], agenda['id']),
                    'text': _('%s - Slots of type form_var_meeting_type_raw') % agenda['text'],
                    'url': '%s{{ form_var_meeting_type_raw }}/datetimes/' % agenda['api']['meetings_url'],
                    'qs_data': {'lock_code': '{{ session_hash_id }}'},
                }
            )
            # get also meeting types
            mt_url = chrono_url(publisher, 'api/agenda/%s/meetings/' % agenda['id'])
            mt_results = get_json_from_url(mt_url, log_message_part='agenda')
            if mt_results is None:
                # abort the whole collection on a failed sub-request
                return
            for meetingtype in mt_results.get('data') or []:
                agenda_data.append(
                    {
                        'slug': 'agenda-%s-%s-mt-%s' % (agenda['kind'], agenda['id'], meetingtype['id']),
                        'text': _('%s - Slots of type %s (%s minutes)')
                        % (agenda['text'], meetingtype['text'], meetingtype['duration']),
                        'url': meetingtype['api']['datetimes_url'],
                        'qs_data': {'lock_code': '{{ session_hash_id }}'},
                    }
                )
    return agenda_data
|
|
|
|
|
|
|
|
|
2021-10-22 11:56:02 +02:00
|
|
|
def build_agenda_datasources(publisher, **kwargs):
    """Synchronise named data sources with the agendas exposed by chrono.

    Fetches the current agenda list from the chrono service and, for each
    entry, creates or updates a NamedDataSource marked external='agenda'.
    Data sources that no longer match a remote agenda are either flagged
    'not-found' (when still used by forms) or removed.

    Silently returns when chrono is not configured or unreachable, so a
    transient outage never destroys existing data sources.
    """
    if not has_chrono(publisher):
        return

    agenda_data = collect_agenda_data(publisher)
    if agenda_data is None:
        # remote error while collecting; keep existing datasources untouched
        return

    # fetch existing agenda datasources, indexed by their URL
    existing_datasources = {}
    for datasource in NamedDataSource.select():
        if datasource.external != 'agenda':
            continue
        existing_datasources[datasource.data_source['value']] = datasource
    # URLs confirmed by this run; a set gives O(1) membership tests below
    seen_datasources = set()

    # build datasources from chrono
    for agenda in agenda_data:
        url = translate_url(publisher, agenda['url'])
        # migration code, 2023-06-23, to remove later: reattach datasources
        # created with the old form_var_meeting_type variable name
        datasource = existing_datasources.get(url)
        if datasource is None and url.endswith('{{ form_var_meeting_type_raw }}/datetimes/'):
            old_url = url.replace('form_var_meeting_type_raw', 'form_var_meeting_type')
            datasource = existing_datasources.get(old_url)
            if datasource:
                existing_datasources.pop(old_url)
                datasource.data_source['value'] = url
        if datasource is None:
            datasource = NamedDataSource()
            datasource.slug = datasource.get_new_slug('chrono_ds_%s' % agenda['slug'])
        datasource.external = 'agenda'
        datasource.data_source = {'type': 'json', 'value': url}
        datasource.external_status = None  # reset
        datasource.record_on_errors = False  # those will be internal publik errors
        datasource.notify_on_errors = True  # that should be notified to sysadmins.
        datasource.name = agenda['text']
        datasource.qs_data = agenda.get('qs_data')
        datasource.store()
        # maintain caches
        existing_datasources[url] = datasource
        seen_datasources.add(url)

    # now check outdated agenda datasources
    for url, datasource in existing_datasources.items():
        if url in seen_datasources:
            continue
        if datasource.is_used():
            # keep it around but mark it so forms using it can warn
            datasource.external_status = 'not-found'
            datasource.store()
            continue
        datasource.remove_self()
|
|
|
|
|
|
|
|
|
2021-02-16 15:58:46 +01:00
|
|
|
class RefreshAgendas(AfterJob):
    """Background job rebuilding the agenda data sources from chrono."""

    label = _('Refreshing agendas')

    def execute(self):
        # delegate the actual synchronisation to the module-level function
        publisher = get_publisher()
        build_agenda_datasources(publisher)
|
|
|
|
|
|
|
|
|
2021-04-06 19:03:44 +02:00
|
|
|
def register_cronjob():
    """Register the periodic job keeping agenda data sources in sync."""
    # every hour: check for agenda datasources
    hourly_job = CronJob(build_agenda_datasources, name='build_agenda_datasources', minutes=[0])
    get_publisher_class().register_cronjob(hourly_job)
|