Compare commits

...

13 Commits

Author SHA1 Message Date
Pierre Ducroquet d5036ed141 sql: test purge of search tokens (#86527)
gitea/wcs/pipeline/head: There was a failure building this commit
2024-04-22 11:54:56 +02:00
Pierre Ducroquet d8ea725cdc wcs_search_tokens: new FTS mechanism with fuzzy-match (#86527)
Introduce a new mechanism to implement FTS with fuzzy matching.
This is made possible by adding and maintaining a table of the
FTS tokens, wcs_search_tokens, fed from searchable_formdefs
and wcs_all_forms.
When a query is issued, its tokens are matched against the stored
tokens, falling back to a fuzzy match when no direct match is found,
and the query is then rebuilt (see the sketch after the commit list).
2024-04-22 11:54:56 +02:00
Pierre Ducroquet 496b95910c tests: add a test for new FTS on formdefs (#86527) 2024-04-22 11:54:56 +02:00
Frédéric Péters c191656d4a api: raise 400 if multiple order_by are given (#89832)
gitea/wcs/pipeline/head: This commit looks good
2024-04-22 10:52:47 +02:00
Frédéric Péters 4e0b3469f1 api: handle block fields in geojson (#89831)
gitea/wcs/pipeline/head: This commit looks good
2024-04-22 10:01:59 +02:00
Frédéric Péters b6c83cca37 misc: do not allow single quotes in custom identifiers (#89816) 2024-04-22 10:01:55 +02:00
Frédéric Péters aa070498e4 misc: do not mark forms and cards as collapsed variables (#89811)
gitea/wcs/pipeline/head: This commit looks good
2024-04-19 16:33:51 +02:00
Frédéric Péters 0dc5d3267f backoffice: display an error on diff of snapshots that do not load (#89801)
gitea/wcs/pipeline/head: This commit looks good
2024-04-19 15:03:06 +02:00
Frédéric Péters 61ce06676d misc: do not compute drafts info when comparing snapshots (#89799)
gitea/wcs/pipeline/head: This commit looks good
2024-04-19 14:01:14 +02:00
Frédéric Péters 291fea5b8b misc: adapt to godo changes (#89633)
gitea/wcs/pipeline/head: This commit looks good
2024-04-19 10:09:03 +02:00
Frédéric Péters 62d178f73c misc: add device geolocation as initial position for markers map (#89641)
gitea/wcs/pipeline/head: This commit looks good
2024-04-18 09:25:16 +02:00
Frédéric Péters b9b6912385 misc: provide formdata context when updating relations (#89652)
gitea/wcs/pipeline/head: This commit looks good
2024-04-18 09:25:06 +02:00
Frédéric Péters 992bc9720a misc: add proper draft handling when adding a block row on first page (#89664)
gitea/wcs/pipeline/head: This commit looks good
2024-04-17 09:35:44 +02:00
22 changed files with 605 additions and 66 deletions
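The main change in this range is the fuzzy full-text-search mechanism from commit d8ea725cdc. Below is a minimal sketch of the user-visible idea, assuming a database where init_search_tokens() from the patch has already been run; the connection parameters, token values and printed results are illustrative only, not part of the patch.

import psycopg2

# hypothetical connection, for illustration only
conn = psycopg2.connect('dbname=wcs_example')
cur = conn.cursor()

# wcs_search_tokens is fed by triggers from the fts columns of wcs_all_forms
# and searchable_formdefs; it ends up holding lexemes such as 'tarif' or 'écol'
cur.execute('SELECT token FROM wcs_search_tokens ORDER BY token')
print([row[0] for row in cur.fetchall()])

# wcs_tsquery() rebuilds the query: words found in the token table are kept as is,
# near misses are expanded (through pg_trgm similarity) to the close tokens found,
# and unknown words fall back to plainto_tsquery() unchanged
cur.execute(
    'SELECT object_id FROM searchable_formdefs WHERE fts @@ wcs_tsquery(%s)', ('tari',)
)
print(cur.fetchall())  # can match entries containing 'tarif', which plainto_tsquery('tari') would miss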

View File

@@ -1266,6 +1266,9 @@ def test_api_list_formdata(pub, local_user):
resp = get_app(pub).get(sign_uri('/api/forms/test/list?full=on&order_by=-foobar', user=local_user))
assert [d['fields']['foobar'] for d in resp.json] == ['FOO BAR %02d' % i for i in range(29, -1, -1)]
# check 400 on multiple order_by
get_app(pub).get(sign_uri('/api/forms/test/list?full=on&order_by=f0,foobar', user=local_user), status=400)
# check fts
resp = get_app(pub).get(sign_uri('/api/forms/test/list?full=on&q=foo', user=local_user))
assert len(resp.json) == 30
@@ -3048,6 +3051,65 @@ def test_api_geojson_formdata_related_field(pub, local_user):
assert properties['item - foobar'] == 'test.txt'
def test_api_geojson_formdata_file_in_block_field(pub, local_user):
pub.role_class.wipe()
role = pub.role_class(name='test')
role.store()
# add role to user
local_user.roles = [role.id]
local_user.store()
BlockDef.wipe()
block = BlockDef()
block.name = 'foobar'
block.fields = [
fields.FileField(id='123', label='file', varname='foo'),
]
block.store()
FormDef.wipe()
formdef = FormDef()
formdef.name = 'test'
formdef.workflow_roles = {'_receiver': role.id}
formdef.fields = [
fields.BlockField(id='1', label='test', varname='blockdata', block_slug='foobar', max_items=3),
]
formdef.geolocations = {'base': 'Location'}
formdef.store()
data_class = formdef.data_class()
data_class.wipe()
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
upload.receive([b'base64me'])
formdata = formdef.data_class()()
formdata.data = {'1': {'data': [{'123': upload}], 'schema': {'123': 'file'}}, '1_display': 'test.txt'}
formdata.geolocations = {'base': {'lat': 48, 'lon': 2}}
formdata.just_created()
formdata.store()
# get with blockfield
resp = get_app(pub).get(sign_uri('/api/forms/test/geojson?filter=all&1=on', user=local_user))
assert len(resp.json['features']) == 1
assert resp.json['features'][0]['properties']['id'] == '1-1'
assert resp.json['features'][0]['properties']['display_fields'][0]['value'] == 'test.txt'
# get with file field in block as property
resp = get_app(pub).get(sign_uri('/api/forms/test/geojson?filter=all&1-123=on', user=local_user))
assert len(resp.json['features']) == 1
assert resp.json['features'][0]['properties']['id'] == '1-1'
assert resp.json['features'][0]['properties']['display_fields'][0]['value'] == 'test.txt'
assert 'download?f=1$0$123' in resp.json['features'][0]['properties']['display_fields'][0]['html_value']
# check full=on
resp = get_app(pub).get(sign_uri('/api/forms/test/geojson?filter=all&full=on', user=local_user))
assert len(resp.json['features']) == 1
properties = {x['label']: x['value'] for x in resp.json['features'][0]['properties']['display_fields']}
assert properties['test'] == 'test.txt'
assert 'file' not in properties
def test_api_distance_filter(pub, local_user):
pub.role_class.wipe()
role = pub.role_class(name='test')

View File

@@ -429,6 +429,9 @@ def test_backoffice_submission_formdef_list_search(pub, local_user, access, auth
resp = get_url('/api/formdefs/?backoffice-submission=on&q=test')
assert len(resp.json['data']) == 2
resp = get_url('/api/formdefs/?backoffice-submission=on&q=tes')
assert len(resp.json['data']) == 2
resp = get_url('/api/formdefs/?backoffice-submission=on&q=xyz')
assert len(resp.json['data']) == 0
@@ -441,6 +444,15 @@ def test_backoffice_submission_formdef_list_search(pub, local_user, access, auth
resp = get_url('/api/formdefs/?backoffice-submission=on&q=abc')
assert len(resp.json['data']) == 2
FormDef.wipe()
formdef = FormDef()
formdef.name = 'Intervention du service hygiène, salubrité et environnement'
formdef.backoffice_submission_roles = [role.id]
formdef.fields = []
formdef.store()
resp = get_url('/api/formdefs/?backoffice-submission=on&q=salubrité')
assert len(resp.json['data']) == 1
def test_formdef_schema(pub, access):
Workflow.wipe()

View File

@@ -2829,3 +2829,55 @@ def test_block_titles_and_empty_block_on_summary_page(pub, emails):
assert 'Form Title' not in resp.text
assert 'Form Page' not in emails.get('New form (form title)')['msg'].get_payload()[0].get_payload()
assert 'Form Title' not in emails.get('New form (form title)')['msg'].get_payload()[0].get_payload()
@pytest.mark.parametrize('logged_user', ['logged', 'anonymous'])
@pytest.mark.parametrize('tracking_code', ['with-tracking-code', 'without-tracking-code'])
def test_block_multiple_rows_single_draft(pub, logged_user, tracking_code):
create_user(pub)
FormDef.wipe()
BlockDef.wipe()
block = BlockDef()
block.name = 'foobar'
block.fields = [fields.StringField(id='123', required=True, label='Test')]
block.store()
formdef = FormDef()
formdef.name = 'form title'
formdef.fields = [
fields.BlockField(id='1', label='test', block_slug='foobar', max_items=5),
]
formdef.enable_tracking_codes = bool(tracking_code == 'with-tracking-code')
formdef.store()
formdef.data_class().wipe()
app = get_app(pub)
if logged_user == 'logged':
login(app, username='foo', password='foo')
resp = app.get(formdef.get_url())
resp.form['f1$element0$f123'].value = 'Hello World'
resp = resp.form.submit('f1$add_element') # add second row
if logged_user == 'logged' or formdef.enable_tracking_codes:
assert formdef.data_class().count() == 1
assert formdef.data_class().select()[0].status == 'draft'
else:
assert formdef.data_class().count() == 0
resp.form['f1$element1$f123'].value = 'Something else'
resp = resp.form.submit('f1$add_element') # add third row
if logged_user == 'logged' or formdef.enable_tracking_codes:
assert formdef.data_class().count() == 1
assert formdef.data_class().select()[0].status == 'draft'
else:
assert formdef.data_class().count() == 0
resp.form['f1$element2$f123'].value = 'Something else'
resp = resp.form.submit('submit') # -> validation page
resp = resp.form.submit('submit') # -> end page
resp = resp.follow()
assert formdef.data_class().count() == 1
assert formdef.data_class().select()[0].status == 'wf-new'

View File

@@ -1396,6 +1396,29 @@ def test_form_item_dynamic_map_data_source(pub, http_requests):
assert len(resp_geojson.json['features']) == 2
def test_form_item_map_data_source_initial_position(pub, http_requests):
NamedDataSource.wipe()
data_source = NamedDataSource(name='foobar')
data_source.data_source = {
'type': 'geojson',
'value': 'http://remote.example.net/geojson',
}
data_source.id_property = 'id'
data_source.label_template_property = '{{ text }}'
data_source.cache_duration = '5'
data_source.store()
formdef = create_formdef()
formdef.fields = [
fields.ItemField(id='1', label='map', display_mode='map', initial_position='geoloc'),
]
formdef.store()
formdef.data_class().wipe()
app = get_app(pub)
resp = app.get('/test/')
assert resp.pyquery('[data-init_with_geoloc="true"]')
def test_form_item_timetable_data_source(pub, http_requests):
NamedDataSource.wipe()
data_source = NamedDataSource(name='foobar')

View File

@@ -1331,6 +1331,7 @@ def test_card_custom_id_format(pub):
assert data_class.force_valid_id_characters('_Fôô bar-') == '_Foo-bar-'
assert data_class.force_valid_id_characters('_Fôô bar☭-') == '_Foo-bar-'
assert data_class.force_valid_id_characters('_Fôô bar❗') == '_Foo-bar'
assert data_class.force_valid_id_characters(' Foo\'bar') == 'Foo-bar'
def test_card_update_related(pub):
@@ -1519,6 +1520,80 @@ def test_card_update_related_with_custom_view(pub):
assert formdata.data['1_display'] == 'view-card1-change1'
def test_card_update_related_with_items_dynamic_custom_view(pub):
CardDef.wipe()
FormDef.wipe()
pub.custom_view_class.wipe()
carddef = CardDef()
carddef.name = 'foo'
carddef.fields = [
StringField(id='1', label='Test', varname='foo'),
StringField(id='2', label='Test2'),
]
carddef.digest_templates = {
'default': '{{ form_var_foo }}',
'custom-view:view': 'view-{{ form_var_foo }}',
}
carddef.store()
carddef.data_class().wipe()
carddata1 = carddef.data_class()()
carddata1.data = {'1': 'card1', '2': 'ok'}
carddata1.just_created()
carddata1.store()
carddata2 = carddef.data_class()()
carddata2.data = {'1': 'card2', '2': 'ok'}
carddata2.just_created()
carddata2.store()
custom_view = pub.custom_view_class()
custom_view.title = 'view'
custom_view.formdef = carddef
custom_view.columns = {'list': [{'id': 'id'}]}
custom_view.filters = {}
custom_view.filters = {'filter-2': 'on', 'filter-2-value': '{{ form_var_data }}'}
custom_view.visibility = 'datasource'
custom_view.store()
formdef = FormDef()
formdef.name = 'foo'
formdef.fields = [
StringField(id='0', label='Foo', varname='data'),
ItemsField(id='1', label='Test', data_source={'type': 'carddef:foo:view'}),
]
formdef.store()
formdata = formdef.data_class()()
formdata.data = {'0': 'ok', '1': ['1']}
formdata.data['1_display'] = 'view-card1'
assert formdata.data['1_display'] == 'view-card1'
formdata.just_created()
formdata.store()
# check usual situation, carddata changed but is still present in the result set
pub.cleanup()
carddef = carddef.get(carddef.id)
carddata1 = carddef.data_class().get(carddata1.id)
carddata1.data = {'1': 'card1-change1', '2': 'ok'}
carddata1.store()
formdata.refresh_from_storage()
assert formdata.data['1'] == ['1']
assert formdata.data['1_display'] == 'view-card1-change1'
# check with a card that will no longer be part of the custom view result set
pub.cleanup()
carddef = carddef.get(carddef.id)
carddata1 = carddef.data_class().get(carddata1.id)
carddata1.data = {'1': 'card1-change2', '2': 'ko'}
carddata1.store()
formdata.refresh_from_storage()
assert formdata.data['1_display'] == 'view-card1-change1' # no update, but data still here
def test_card_update_related_cascading(pub):
BlockDef.wipe()
CardDef.wipe()

View File

@@ -12,7 +12,7 @@ from wcs.carddef import CardDef
from wcs.categories import Category
from wcs.comment_templates import CommentTemplate
from wcs.data_sources import NamedDataSource
from wcs.fields import CommentField, ItemField, PageField, StringField
from wcs.fields import BlockField, CommentField, ItemField, PageField, StringField
from wcs.formdef import FormDef
from wcs.mail_templates import MailTemplate
from wcs.qommon.form import UploadedFile
@@ -362,6 +362,61 @@ def test_form_snapshot_diff(pub):
assert 'Snapshot <a href="%s/view/">%s</a> - (Version 42.0)' % (snapshot3.id, snapshot3.id) in resp
def test_form_snapshot_diff_with_reference_error(pub):
create_superuser(pub)
create_role(pub)
BlockDef.wipe()
blockdef = BlockDef()
blockdef.name = 'testblock'
blockdef.fields = []
blockdef.store()
FormDef.wipe()
formdef = FormDef()
formdef.name = 'testform'
formdef.fields = [
BlockField(id='1', label='block1', varname='foo', block_slug=blockdef.slug),
]
formdef.store()
assert pub.snapshot_class.count() == 2
snapshot1 = pub.snapshot_class.get_latest('formdef', formdef.id)
formdef.fields.append(StringField(id=2, label='Test'))
formdef.store()
assert pub.snapshot_class.count() == 3
formdef.fields = formdef.fields[1:]
formdef.store()
assert pub.snapshot_class.count() == 4
snapshot3 = pub.snapshot_class.get_latest('formdef', formdef.id)
app = login(get_app(pub))
resp = app.get(
'/backoffice/forms/%s/history/compare?version1=%s&version2=%s'
% (formdef.id, snapshot1.id, snapshot3.id)
)
assert resp.pyquery('h2').text() == 'Compare snapshots (XML)'
resp = app.get(
'/backoffice/forms/%s/history/compare?version1=%s&version2=%s&mode=inspect'
% (formdef.id, snapshot1.id, snapshot3.id)
)
assert resp.pyquery('h2').text() == 'Compare snapshots (Inspect)'
BlockDef.wipe()
resp = app.get(
'/backoffice/forms/%s/history/compare?version1=%s&version2=%s'
% (formdef.id, snapshot1.id, snapshot3.id)
)
assert resp.pyquery('h2').text() == 'Compare snapshots (XML)'
resp = app.get(
'/backoffice/forms/%s/history/compare?version1=%s&version2=%s&mode=inspect'
% (formdef.id, snapshot1.id, snapshot3.id)
)
assert resp.pyquery('h2').text() == 'Error'
assert 'Can not display snapshot (Unknown referenced objects)' in resp.text
def test_form_snapshot_comments(pub):
create_superuser(pub)
create_role(pub)

View File

@@ -1185,6 +1185,45 @@ def test_sql_criteria_fts(pub):
assert data_class.select([st.FtsMatch(formdata1.id_display)])[0].id_display == formdata1.id_display
def test_search_tokens_purge(pub):
_, cur = sql.get_connection_and_cursor()
# purge garbage from other tests
sql.purge_obsolete_search_tokens()
cur.execute('SELECT count(*) FROM wcs_search_tokens;')
start = cur.fetchone()[0]
# define a new table
test_formdef = FormDef()
test_formdef.name = 'tableSelectFTStokens'
test_formdef.fields = [fields.StringField(id='3', label='string')]
test_formdef.store()
data_class = test_formdef.data_class(mode='sql')
cur.execute('SELECT count(*) FROM wcs_search_tokens;')
assert cur.fetchone()[0] == start + 1
t = data_class()
t.data = {'3': 'foofortokensofcourse'}
t.just_created()
t.store()
cur.execute('SELECT count(*) FROM wcs_search_tokens;')
assert cur.fetchone()[0] == start + 2
t.data = {'3': 'chaussettefortokensofcourse'}
t.store()
cur.execute('SELECT count(*) FROM wcs_search_tokens;')
assert cur.fetchone()[0] == start + 3
sql.purge_obsolete_search_tokens()
cur.execute('SELECT count(*) FROM wcs_search_tokens;')
assert cur.fetchone()[0] == start + 2
def table_exists(cur, table_name):
cur.execute(
'''SELECT COUNT(*) FROM information_schema.tables

View File

@@ -1754,7 +1754,10 @@ class FormDefPage(Directory, TempfileDirectoryMixin, DocumentableMixin):
else:
role_label = '-'
view.role = role_label
context['custom_views'] = sorted(custom_views, key=lambda x: getattr(x, 'title'))
context['is_carddef'] = isinstance(self.formdef, CardDef)
if not hasattr(self.formdef, 'snapshot_object'):
deprecations = DeprecationsDirectory()
context['deprecations'] = deprecations.get_deprecations(
@@ -1762,54 +1765,53 @@ class FormDefPage(Directory, TempfileDirectoryMixin, DocumentableMixin):
)
context['deprecation_titles'] = deprecations.titles
receipt_time_criteria = GreaterOrEqual(
'receipt_time',
datetime.datetime.now() - datetime.timedelta(days=self.formdef.get_drafts_lifespan()),
)
receipt_time_criteria = GreaterOrEqual(
'receipt_time',
datetime.datetime.now() - datetime.timedelta(days=self.formdef.get_drafts_lifespan()),
)
temp_drafts = defaultdict(int)
for formdata in self.formdef.data_class().select_iterator(
clause=[Equal('status', 'draft'), receipt_time_criteria], itersize=200
):
page_id = formdata.page_id if formdata.page_id is not None else '_unknown'
temp_drafts[page_id] += 1
temp_drafts = defaultdict(int)
for formdata in self.formdef.data_class().select_iterator(
clause=[Equal('status', 'draft'), receipt_time_criteria], itersize=200
):
page_id = formdata.page_id if formdata.page_id is not None else '_unknown'
temp_drafts[page_id] += 1
total_drafts = sum(temp_drafts.values()) if temp_drafts else 0
drafts = {}
special_page_index_mapping = {
'_first_page': -1000, # first
'_unknown': 1000, # last
'_confirmation_page': 999, # second to last
}
if total_drafts:
for page_id, page_index in special_page_index_mapping.items():
try:
page_total = temp_drafts.pop(page_id)
except KeyError:
page_total = 0
drafts[page_id] = {'total': page_total, 'field': None, 'page_index': page_index}
for page_id, page_total in temp_drafts.items():
for index, field in enumerate(self.formdef.iter_fields(with_backoffice_fields=False)):
if page_id == field.id and isinstance(field, PageField):
drafts[page_id] = {
'total': page_total,
'field': field,
'page_index': index,
}
break
else:
drafts['_unknown']['total'] += page_total
total_drafts = sum(temp_drafts.values()) if temp_drafts else 0
drafts = {}
special_page_index_mapping = {
'_first_page': -1000, # first
'_unknown': 1000, # last
'_confirmation_page': 999, # second to last
}
if total_drafts:
for page_id, page_index in special_page_index_mapping.items():
try:
page_total = temp_drafts.pop(page_id)
except KeyError:
page_total = 0
drafts[page_id] = {'total': page_total, 'field': None, 'page_index': page_index}
for page_id, page_total in temp_drafts.items():
for index, field in enumerate(self.formdef.iter_fields(with_backoffice_fields=False)):
if page_id == field.id and isinstance(field, PageField):
drafts[page_id] = {
'total': page_total,
'field': field,
'page_index': index,
}
break
else:
drafts['_unknown']['total'] += page_total
for draft_data in drafts.values():
draft_data['percent'] = 100 * draft_data['total'] / total_drafts
for draft_data in drafts.values():
draft_data['percent'] = 100 * draft_data['total'] / total_drafts
total_formdata = self.formdef.data_class().count([receipt_time_criteria])
context['drafts'] = sorted(drafts.items(), key=lambda x: x[1]['page_index'])
context['percent_submitted_formdata'] = 100 * (total_formdata - total_drafts) / total_formdata
context['total_formdata'] = total_formdata
total_formdata = self.formdef.data_class().count([receipt_time_criteria])
context['drafts'] = sorted(drafts.items(), key=lambda x: x[1]['page_index'])
context['percent_submitted_formdata'] = 100 * (total_formdata - total_drafts) / total_formdata
context['total_formdata'] = total_formdata
context['total_drafts'] = total_drafts
context['is_carddef'] = isinstance(self.formdef, CardDef)
context['total_drafts'] = total_drafts
return template.QommonTemplateResponse(
templates=[self.inspect_template_name],

View File

@@ -124,6 +124,9 @@ def geojson_formdatas(formdatas, geoloc_key='base', fields=None):
if hasattr(html_value, 'replace'):
html_value = html_value.replace('[download]', '%sdownload' % formdata_backoffice_url)
value = formdata.get_field_view_value(field)
if field.key == 'block':
# return display value for block fields, not the internal structure
value = formdata.data.get(f'{field.id}_display')
if not html_value and not value:
continue
@@ -133,7 +136,7 @@ def geojson_formdatas(formdatas, geoloc_key='base', fields=None):
'value': str(value),
'html_value': str(htmlescape(html_value)),
}
if field.key == 'file':
if field.key == 'file' and not getattr(field, 'block_field', None):
raw_value = formdata.data.get(field.id)
if raw_value.has_redirect_url():
geojson_infos['file_url'] = field.get_download_url(file_value=raw_value)
@@ -1724,7 +1727,7 @@ class FormPage(Directory, TempfileDirectoryMixin):
self.view.remove_self()
return redirect('..')
def get_formdef_fields(self, include_block_items_fields=False):
def get_formdef_fields(self, include_block_fields=True, include_block_items_fields=False):
yield filter_fields.IdFilterField(formdef=self.formdef)
if self.formdef.default_digest_template:
yield filter_fields.DigestFilterField(formdef=self.formdef)
@@ -1744,7 +1747,7 @@ class FormPage(Directory, TempfileDirectoryMixin):
field.has_relations = True
yield filter_fields.UserRelatedField(field)
for field in self.formdef.iter_fields(include_block_fields=True):
for field in self.formdef.iter_fields(include_block_fields=include_block_fields):
if getattr(field, 'block_field', None):
if field.key == 'items' and not include_block_items_fields:
# not yet
@@ -2689,7 +2692,7 @@ class FormPage(Directory, TempfileDirectoryMixin):
selected_filter = self.get_filter_from_query()
selected_filter_operator = self.get_filter_operator_from_query()
if get_request().form.get('full') == 'on':
fields = list(self.get_formdef_fields())
fields = list(self.get_formdef_fields(include_block_fields=False))
else:
fields = self.get_fields_from_query()
criterias = self.get_criterias_from_query()

View File

@@ -136,7 +136,13 @@ class SnapshotsDirectory(Directory):
if mode == 'inspect' and not has_inspect:
raise errors.TraversalError()
context = getattr(self, 'get_compare_%s_context' % mode)(snapshot1, snapshot2)
from wcs.blocks import BlockdefImportError
try:
context = getattr(self, 'get_compare_%s_context' % mode)(snapshot1, snapshot2)
except (BlockdefImportError, FormdefImportError, WorkflowImportError) as e:
return template.error_page(_('Can not display snapshot (%s)') % e)
context.update(
{
'mode': mode,

View File

@@ -163,6 +163,7 @@ class UpdateRelationsAfterJob(AfterJob):
return
klass = {'carddef': CardDef, 'formdef': FormDef}
publisher = get_publisher()
# check all known reverse relations
for obj_ref in {x['obj'] for x in carddef.reverse_relations}:
@@ -207,6 +208,11 @@ class UpdateRelationsAfterJob(AfterJob):
if objdata_seen_key in update_related_seen:
# do not allow updates to cycle back
continue
publisher.reset_formdata_state()
publisher.substitutions.feed(objdata.formdef)
publisher.substitutions.feed(objdata)
objdata_changed = False
for field in fields:
if getattr(field, 'block_field', None):

View File

@@ -613,6 +613,7 @@ class ItemField(WidgetField, MapOptionsMixin, ItemFieldMixin, ItemWithImageField
title=_('Initial Position'),
options=(
('', _('Default position (from markers)'), ''),
('geoloc', _('Device geolocation'), 'geoloc'),
('template', _('From template'), 'template'),
),
value=self.initial_position or '',

View File

@@ -501,9 +501,8 @@ class FormData(StorableObject):
@classmethod
def force_valid_id_characters(cls, value):
value = unidecode.unidecode(value)
value = re.sub(r'[^\w\s\'\-_]', '', unidecode.unidecode(value)).strip()
value = re.sub(r'\s+', '-', value)
value = re.sub(r'[\s\']+', '-', value)
return value
def set_auto_fields(self, *args, **kwargs):

View File

@@ -1444,10 +1444,12 @@ class FormPage(Directory, TempfileDirectoryMixin, FormTemplateMixin):
# by clicking on a submit widget; for example if an "add row"
# button is clicked. [ADD_ROW_BUTTON]
if form.has_errors() or form.get_submit() is True:
if self.has_draft_support() and not honeypot_error:
token_error = form.get_widget('_form_id') and form.get_widget('_form_id').has_error()
if self.has_draft_support() and not (honeypot_error or token_error):
# save draft during server roundtrip
try:
self.save_draft(form_data)
session.add_magictoken(magictoken, form_data) # make sure draft id is saved
except SubmittedDraftException:
get_session().message = ('error', self.already_submitted_message)
return redirect(

View File

@@ -497,6 +497,7 @@ class WcsPublisher(QommonPublisher):
for _formdef in FormDef.select() + CardDef.select():
sql.do_formdef_tables(_formdef)
sql.migrate_global_views(conn, cur)
sql.init_search_tokens()
cur.close()
def record_deprecated_usage(self, *args, **kwargs):

View File

@@ -1148,7 +1148,7 @@ def get_int_or_400(value):
def get_order_by_or_400(value):
if value in (None, ''):
return None
if not re.match(r'-?[a-z0-9_-]+$', value):
if not (isinstance(value, str) and re.match(r'-?[a-z0-9_-]+$', value)):
raise RequestError()
return value
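For the order_by change (#89832), the added isinstance() check presumably covers the case where the parameter is repeated (?order_by=f0&order_by=foobar) and therefore reaches the function as a list, which re.match() cannot handle; the comma-separated form used in the test is already rejected by the regex itself. A small stand-alone sketch of the guarded check, with a simplified RequestError stand-in:

import re

class RequestError(Exception):
    # simplified stand-in for wcs's RequestError, which the API layer turns into a 400
    pass

def get_order_by_or_400(value):
    if value in (None, ''):
        return None
    # without the isinstance() guard, a list value would make re.match() raise
    # TypeError and the request would fail with a 500 instead of a clean 400
    if not (isinstance(value, str) and re.match(r'-?[a-z0-9_-]+$', value)):
        raise RequestError()
    return value

for value in ('-receipt_time', 'f0,foobar', ['f0', 'foobar']):
    try:
        print(value, '->', get_order_by_or_400(value))
    except RequestError:
        print(value, '-> rejected (HTTP 400)')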

View File

@@ -713,6 +713,11 @@ class QommonPublisher(Publisher):
for error in self.loggederror_class.select(clause=clauses):
self.loggederror_class.remove_object(error.id)
def clean_search_tokens(self, **kwargs):
from wcs import sql
sql.purge_obsolete_search_tokens()
@classmethod
def register_cronjobs(cls):
cls.register_cronjob(CronJob(cls.clean_sessions, minutes=[0], name='clean_sessions'))
@@ -725,6 +730,9 @@ class QommonPublisher(Publisher):
cls.register_cronjob(
CronJob(cls.clean_loggederrors, hours=[3], minutes=[0], name='clean_loggederrors')
)
cls.register_cronjob(
CronJob(cls.clean_search_tokens, weekdays=[0], hours=[1], minutes=[0], name='clean_search_tokens')
)
_initialized = False

View File

@@ -3223,7 +3223,7 @@ aside .bo-block.documentation {
}
.godo.html-edition,
.godo.html-edition--show {
.godo.html-edition.is-editable {
--padding: 0.5em;
outline: none;
background: transparent;
@@ -3238,7 +3238,7 @@ aside .bo-block.documentation {
}
}
.godo.html-edition--show {
.godo.html-edition.is-editable {
.godo--editor > :first-child {
padding-top: var(--padding);
}

View File

@@ -549,8 +549,6 @@ $(function() {
const documentation_save_button = document.querySelector('.bo-block.documentation button.save')
var clear_documentation_save_marks_timeout_id = null
if (editor_link) {
document.querySelector('#documentation-editor .godo--editor').setAttribute('contenteditable', 'false')
documentation_save_button.addEventListener('click', (e) => {
editor.sourceContent = editor.getHTML()
var documentation_message = Object()
@@ -594,10 +592,9 @@ $(function() {
editor_link.addEventListener('click', (e) => {
e.preventDefault()
if (editor_link.getAttribute('aria-pressed') == 'true') {
editor.validEdition()
editor.editable = false;
documentation_save_button.dispatchEvent(new Event('click'))
documentation_block.classList.remove('active')
document.querySelector('#documentation-editor .godo--editor').setAttribute('contenteditable', 'false')
editor_link.setAttribute('aria-pressed', false)
if (title_byline) title_byline.style.visibility = 'visible'
} else {
@@ -612,8 +609,7 @@ $(function() {
}
if (title_byline) title_byline.style.visibility = 'hidden'
editor_link.setAttribute('aria-pressed', true)
document.querySelector('#documentation-editor .godo--editor').setAttribute('contenteditable', 'true')
editor.showEdition()
editor.editable = true;
editor.view.focus()
}
})

View File

@@ -96,6 +96,20 @@ SQL_TYPE_MAPPING = {
}
def _table_exists(cur, table_name):
cur.execute('SELECT 1 FROM pg_class WHERE relname = %s', (table_name,))
rows = cur.fetchall()
return len(rows) > 0
def _trigger_exists(cur, table_name, trigger_name):
cur.execute(
'SELECT 1 FROM pg_trigger WHERE tgrelid = %s::regclass AND tgname = %s', (table_name, trigger_name)
)
rows = cur.fetchall()
return len(rows) > 0
class WcsPgConnection(psycopg2.extensions.connection):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -1611,6 +1625,8 @@ def do_global_views(conn, cur):
% (name, category.id)
)
init_search_tokens_triggers(cur)
def clean_global_views(conn, cur):
# Purge of any dead data
@@ -1703,11 +1719,182 @@ def init_global_table(conn=None, cur=None):
endpoint_status=endpoint_status_filter,
)
)
init_search_tokens_data(cur)
if own_conn:
cur.close()
def init_search_tokens(conn=None, cur=None):
"""Initialize the search_tokens mechanism.
It's based on three parts:
- a token table
- triggers to feed this table from the tsvectors used in the database
- a search function that will leverage these tokens to extend the search query.
So far, the sources used are wcs_all_forms and searchable_formdefs.
Example: let's say the source texts are "Tarif d'école" and "La cantine".
This gives the following tsvectors: ('tarif', 'écol') and ('cantin').
Our tokens table will contain these three words.
When the search function is launched, it splits the search query and
replaces tokens that are not in the table with close ones, when available.
The search query 'tari' will be expanded to 'tarif'.
The search query 'collège' will remain unchanged (and return nothing).
If several tokens match or are close enough, the query will be expanded to
an OR.
"""
own_cur = False
if cur is None:
own_cur = True
conn, cur = get_connection_and_cursor()
# Create table
cur.execute('CREATE TABLE IF NOT EXISTS wcs_search_tokens(token TEXT PRIMARY KEY);')
# Create triggers
init_search_tokens_triggers(cur)
# Fill table
init_search_tokens_data(cur)
# Index at the end, small performance trick... not that useful, but it's free...
cur.execute('CREATE EXTENSION IF NOT EXISTS pg_trgm;')
cur.execute(
'CREATE INDEX IF NOT EXISTS wcs_search_tokens_trgm ON wcs_search_tokens USING gin(token gin_trgm_ops);'
)
# And last: functions to use this brand new table
# These two aggregates make the search query far simpler to write, allowing writing an OR/AND of search terms
# directly as an SQL aggregation.
# They use the tsquery_or and tsquery_and functions that are included in PostgreSQL since 8.3, but documented
# under their operator names || and &&.
cur.execute('CREATE OR REPLACE AGGREGATE tsquery_agg_or (tsquery) (sfunc=tsquery_or, stype=tsquery);')
cur.execute('CREATE OR REPLACE AGGREGATE tsquery_agg_and (tsquery) (sfunc=tsquery_and, stype=tsquery);')
cur.execute(
r"""CREATE OR REPLACE FUNCTION public.wcs_tsquery(text)
RETURNS tsquery
LANGUAGE sql
STABLE
AS $function$
WITH
tokenized AS (SELECT unnest(regexp_split_to_array($1, '\s+')) word),
super_tokenized AS (
-- perfect: tokens that are found as is in table, thus no OR required
-- partial: tokens found using distance search on tokens table (note: numbers are excluded here)
-- distance search is done using pg_trgm, https://www.postgresql.org/docs/current/pgtrgm.html
-- otherwise: token as is and likely no search result later
SELECT word,
coalesce((select perfect.token::tsquery FROM wcs_search_tokens AS perfect WHERE perfect.token = plainto_tsquery(word)::text),
tsquery_agg_or(partial.token::tsquery),
plainto_tsquery(word)) AS tokens
FROM tokenized
LEFT JOIN wcs_search_tokens AS partial ON partial.token % plainto_tsquery('simple', word)::text AND word not similar to '%[0-9]{2,}%'
GROUP BY word)
SELECT tsquery_agg_and(tokens) FROM super_tokenized;
$function$;"""
)
if own_cur:
cur.close()
def init_search_tokens_triggers(cur):
# We define only appending triggers, i.e. on INSERT and UPDATE.
# It would be far heavier to maintain deletions here, and keeping extra data has
# no or only a marginal impact on search performance, and absolutely no impact
# on search results.
# Instead, a weekly cron job deletes obsolete entries, making sure no
# personal data is kept needlessly.
# First part: the appending function
cur.execute(
"""CREATE OR REPLACE FUNCTION wcs_search_tokens_trigger_fn ()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
BEGIN
INSERT INTO wcs_search_tokens SELECT unnest(tsvector_to_array(NEW.fts)) ON CONFLICT(token) DO NOTHING;
RETURN NEW;
END;
$function$;"""
)
if not (_table_exists(cur, 'wcs_search_tokens')):
# abort trigger creation if tokens table doesn't exist yet
return
if _table_exists(cur, 'wcs_all_forms') and not _trigger_exists(
cur, 'wcs_all_forms', 'wcs_all_forms_fts_trg_upd'
):
# Second part: insert and update triggers for wcs_all_forms
cur.execute(
"""CREATE TRIGGER wcs_all_forms_fts_trg_ins
AFTER INSERT ON wcs_all_forms
FOR EACH ROW WHEN (NEW.fts IS NOT NULL)
EXECUTE PROCEDURE wcs_search_tokens_trigger_fn();"""
)
cur.execute(
"""CREATE TRIGGER wcs_all_forms_fts_trg_upd
AFTER UPDATE OF fts ON wcs_all_forms
FOR EACH ROW WHEN (NEW.fts IS NOT NULL)
EXECUTE PROCEDURE wcs_search_tokens_trigger_fn();"""
)
if _table_exists(cur, 'searchable_formdefs') and not _trigger_exists(
cur, 'searchable_formdefs', 'searchable_formdefs_fts_trg_upd'
):
# Third part: insert and update triggers for searchable_formdefs
cur.execute(
"""CREATE TRIGGER searchable_formdefs_fts_trg_ins
AFTER INSERT ON searchable_formdefs
FOR EACH ROW WHEN (NEW.fts IS NOT NULL)
EXECUTE PROCEDURE wcs_search_tokens_trigger_fn();"""
)
cur.execute(
"""CREATE TRIGGER searchable_formdefs_fts_trg_upd
AFTER UPDATE OF fts ON searchable_formdefs
FOR EACH ROW WHEN (NEW.fts IS NOT NULL)
EXECUTE PROCEDURE wcs_search_tokens_trigger_fn();"""
)
def init_search_tokens_data(cur):
if not (_table_exists(cur, 'wcs_search_tokens')):
# abort table data initialization if tokens table doesn't exist yet
return
if _table_exists(cur, 'wcs_all_forms'):
cur.execute(
"""INSERT INTO wcs_search_tokens
SELECT unnest(tsvector_to_array(fts)) FROM wcs_all_forms
ON CONFLICT(token) DO NOTHING;"""
)
if _table_exists(cur, 'searchable_formdefs'):
cur.execute(
"""INSERT INTO wcs_search_tokens
SELECT unnest(tsvector_to_array(fts)) FROM searchable_formdefs
ON CONFLICT(token) DO NOTHING;"""
)
def purge_obsolete_search_tokens(cur=None):
own_cur = False
if cur is None:
own_cur = True
_, cur = get_connection_and_cursor()
cur.execute(
"""DELETE FROM wcs_search_tokens
WHERE token NOT IN (SELECT unnest(tsvector_to_array(fts)) FROM wcs_all_forms)
AND token NOT IN (SELECT unnest(tsvector_to_array(fts)) FROM searchable_formdefs);"""
)
if own_cur:
cur.close()
class SqlMixin:
_table_name = None
_numerical_id = True
@@ -4881,7 +5068,6 @@ class SearchableFormDef(SqlMixin):
% (cls._table_name, cls._table_name)
)
cls.do_indexes(cur)
cur.close()
from wcs.carddef import CardDef
from wcs.formdef import FormDef
@@ -4890,6 +5076,8 @@ class SearchableFormDef(SqlMixin):
CardDef.select(ignore_errors=True), FormDef.select(ignore_errors=True)
):
cls.update(obj=objectdef)
init_search_tokens(cur)
cur.close()
@classmethod
def update(cls, obj=None, removed_obj_type=None, removed_obj_id=None):
@@ -4927,7 +5115,7 @@
def search(cls, obj_type, string):
_, cur = get_connection_and_cursor()
cur.execute(
'SELECT object_id FROM searchable_formdefs WHERE fts @@ plainto_tsquery(%s)',
'SELECT object_id FROM searchable_formdefs WHERE fts @@ wcs_tsquery(%s)',
(FtsMatch.get_fts_value(string),),
)
ids = [x[0] for x in cur.fetchall()]
@@ -5192,7 +5380,7 @@ def get_period_total(
# latest migration, number + description (description is not used
# programmaticaly but will make sure git conflicts if two migrations are
# separately added with the same number)
SQL_LEVEL = (107, 'add test_uuid column to users table')
SQL_LEVEL = (108, 'new fts mechanism with tokens table')
def migrate_global_views(conn, cur):
@@ -5529,6 +5717,10 @@ def migrate():
for formdef in FormDef.select() + CardDef.select():
do_formdef_tables(formdef, rebuild_views=False, rebuild_global_views=False)
if sql_level < 108:
# 108: new fts mechanism with tokens table
init_search_tokens()
if sql_level != SQL_LEVEL[0]:
cur.execute(
'''UPDATE wcs_meta SET value = %s, updated_at=NOW() WHERE key = %s''',
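As described in the init_search_tokens() docstring above, the mechanism has three parts (the token table, the feeding triggers and the wcs_tsquery() function), plus a weekly purge so that no personal data lingers in the token table. A short maintenance sketch using the functions added in this file; it assumes a configured publisher and database and is illustrative only, not part of the patch:

from wcs import sql

# create wcs_search_tokens, its triggers, the pg_trgm index and wcs_tsquery();
# this is what migration 108 and the WcsPublisher change above also call
conn, cur = sql.get_connection_and_cursor()
sql.init_search_tokens(conn, cur)

# the triggers only ever add tokens (INSERT/UPDATE, no DELETE handling), so the
# weekly clean_search_tokens cron job drops entries that no longer appear in
# wcs_all_forms or searchable_formdefs
sql.purge_obsolete_search_tokens(cur)
cur.close()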

View File

@@ -379,6 +379,11 @@ class FtsMatch(Criteria):
return 'fts @@ plainto_tsquery(%%(c%s)s)' % id(self.value)
class WcsFtsMatch(FtsMatch):
def as_sql(self):
return 'fts @@ wcs_tsquery(%%(c%s)s)' % id(self.value)
class ElementEqual(Criteria):
def __init__(self, attribute, key, value, **kwargs):
super().__init__(attribute, value)

View File

@@ -2045,7 +2045,7 @@ class CardsSource:
def __getattr__(self, attr):
if attr == 'inspect_collapse':
return True
return False
try:
return LazyFormDef(CardDef.get_by_urlname(attr, use_cache=True))
except KeyError:
@@ -2062,7 +2062,7 @@ class FormsSource:
def __getattr__(self, attr):
if attr == 'inspect_collapse':
return True
return False
try:
return LazyFormDef(FormDef.get_by_urlname(attr, use_cache=True))
except KeyError: