Compare commits

...

8 Commits

4 changed files with 119 additions and 26 deletions

View File

@@ -446,6 +446,7 @@ class SubmissionDirectory(Directory):
     def get_submittable_formdefs(self):
         user = get_request().user

+        agent_ids = set()
         list_forms = []
         for formdef in FormDef.select(order_by='name', ignore_errors=True):
             if formdef.is_disabled():
@@ -459,6 +460,21 @@ class SubmissionDirectory(Directory):
                 continue
             list_forms.append(formdef)
+            # prefetch formdatas
+            data_class = formdef.data_class()
+            formdef._formdatas = data_class.select(
+                [Equal('status', 'draft'), Equal('backoffice_submission', True)]
+            )
+            formdef._formdatas.sort(key=lambda x: x.receipt_time or make_aware(datetime.datetime(1900, 1, 1)))
+            agent_ids.update([x.submission_agent_id for x in formdef._formdatas if x.submission_agent_id])
+
+        # prefetch agents
+        self.prefetched_agents = {
+            str(x.id): x
+            for x in get_publisher().user_class.get_ids(list(agent_ids), ignore_errors=True)
+            if x is not None
+        }
         return list_forms

     def _q_index(self):
@@ -501,15 +517,6 @@ class SubmissionDirectory(Directory):
         if mode != 'create':
             skip = True
             for formdef in formdefs:
-                if not hasattr(formdef, '_formdatas'):
-                    data_class = formdef.data_class()
-                    formdata_ids = data_class.get_ids_with_indexed_value('status', 'draft')
-                    formdef._formdatas = [
-                        x for x in data_class.get_ids(formdata_ids) if x.backoffice_submission is True
-                    ]
-                    formdef._formdatas.sort(
-                        key=lambda x: x.receipt_time or make_aware(datetime.datetime(1900, 1, 1))
-                    )
                 skip &= not (bool(formdef._formdatas))
             if skip:
                 return
@@ -553,9 +560,7 @@ class SubmissionDirectory(Directory):
                 else _('unknown date'),
             }
             if formdata.submission_agent_id:
-                agent_user = get_publisher().user_class.get(
-                    formdata.submission_agent_id, ignore_errors=True
-                )
+                agent_user = self.prefetched_agents.get(formdata.submission_agent_id)
                 if agent_user:
                     label += ' (%s)' % agent_user.display_name
             r += htmltext('<a href="%s/%s/">%s</a>') % (formdef.url_name, formdata.id, label)
@@ -568,15 +573,12 @@ class SubmissionDirectory(Directory):
         count = 0
         mode = get_request().form.get('mode')
         for formdef in formdefs:
-            if not hasattr(formdef, '_formdatas'):
-                data_class = formdef.data_class()
-                formdata_ids = data_class.get_ids_with_indexed_value('status', 'draft')
-                formdatas = [x for x in data_class.get_ids(formdata_ids) if x.backoffice_submission is True]
-            if mode == 'empty':
-                formdatas = [x for x in formdatas if x.has_empty_data()]
-            elif mode == 'existing':
-                formdatas = [x for x in formdatas if not x.has_empty_data()]
-            count += len(formdatas)
+            formdatas = formdef._formdatas
+            if mode == 'empty':
+                formdatas = [x for x in formdatas if x.has_empty_data()]
+            elif mode == 'existing':
+                formdatas = [x for x in formdatas if not x.has_empty_data()]
+            count += len(formdatas)
         return misc.json_response({'count': count})

     def _q_lookup(self, component):
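
Note on this first file: the change replaces one user_class.get() call per draft with a
single batched lookup. A minimal standalone sketch of the pattern, assuming the publisher
API used in the diff (formdatas and formdata stand in for the real loop variables):

    agent_ids = {x.submission_agent_id for x in formdatas if x.submission_agent_id}
    prefetched_agents = {
        str(x.id): x
        for x in get_publisher().user_class.get_ids(list(agent_ids), ignore_errors=True)
        if x is not None  # get_ids() can yield None entries for missing ids
    }
    agent_user = prefetched_agents.get(formdata.submission_agent_id)  # dict hit, no extra SELECT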

View File

@@ -690,6 +690,10 @@ class QommonPublisher(Publisher):
         for error in self.loggederror_class.select(clause=clauses):
             self.loggederror_class.remove_object(error.id)

+    def clean_search_tokens(self, **kwargs):
+        from wcs import sql
+        sql.purge_obsolete_search_tokens()
+
     @classmethod
     def register_cronjobs(cls):
         cls.register_cronjob(CronJob(cls.clean_sessions, minutes=[0], name='clean_sessions'))
@@ -702,6 +706,7 @@ class QommonPublisher(Publisher):
         cls.register_cronjob(
             CronJob(cls.clean_loggederrors, hours=[3], minutes=[0], name='clean_loggederrors')
         )
+        cls.register_cronjob(CronJob(cls.clean_search_tokens, weekdays=[0], hours=[1], minutes=[0], name='clean_tokens'))

     _initialized = False
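
Note: the job registered above runs once a week and delegates to
sql.purge_obsolete_search_tokens() (third file below). The whole cleanup boils down to a
single statement; a sketch of it, assuming a psycopg2 cursor on the wcs database:

    cur.execute(
        "DELETE FROM wcs_search_tokens"
        " WHERE token NOT IN (SELECT unnest(tsvector_to_array(fts)) FROM wcs_all_forms);"
    )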

View File

@@ -1579,6 +1579,8 @@ def do_global_views(conn, cur):
             % (name, category.id)
         )

+    init_search_tokens_triggers(cur)
+

 def clean_global_views(conn, cur):
     # Purge of any dead data
@@ -1671,11 +1673,91 @@ def init_global_table(conn=None, cur=None):
                 endpoint_status=endpoint_status_filter,
             )
         )
+
+    init_search_tokens_data(cur)
+
     if own_conn:
         cur.close()


+def init_search_tokens(conn=None, cur=None):
+    own_cur = False
+    if not cur:
+        own_cur = True
+        conn, cur = get_connection_and_cursor()
+
+    cur.execute("SELECT 1 FROM pg_class WHERE relname = 'wcs_search_tokens';")
+    rows = cur.fetchall()
+    if len(rows) == 1:
+        # table already exists, nothing to do.
+        if own_cur:
+            cur.close()
+        return
+
+    # Create table
+    cur.execute("CREATE TABLE wcs_search_tokens(token TEXT PRIMARY KEY);")
+
+    # Create triggers from wcs_all_forms
+    init_search_tokens_triggers(cur)
+
+    # Fill table from wcs_all_forms
+    init_search_tokens_data(cur)
+
+    # Index at the end, small performance trick... not that useful, but it's free...
+    cur.execute(
+        "CREATE INDEX IF NOT EXISTS wcs_search_tokens_trgm"
+        " ON wcs_search_tokens USING gin(token gin_trgm_ops);"
+    )
+
+    # And last: functions to use this brand new table
+    cur.execute("CREATE OR REPLACE AGGREGATE tsquery_agg_or (tsquery) (sfunc=tsquery_or, stype=tsquery);")
+    cur.execute("CREATE OR REPLACE AGGREGATE tsquery_agg_and (tsquery) (sfunc=tsquery_and, stype=tsquery);")
+    cur.execute(
+        """CREATE OR REPLACE FUNCTION public.wcs_tsquery(text)
+            RETURNS tsquery
+            LANGUAGE sql
+            STABLE
+        AS $function$
+        with
+            tokenized as (select unnest(regexp_split_to_array($1, '\s+')) w),
+            super_tokenized as (
+                select w,
+                       coalesce(
+                           tsquery_agg_or(plainto_tsquery(token) order by token <-> w desc),
+                           plainto_tsquery(w)
+                       ) tokens
+                  from tokenized
+                  left join wcs_search_tokens on token % w
+                 group by w)
+        select tsquery_agg_and(tokens) from super_tokenized;
+        $function$;"""
+    )
+
+    if own_cur:
+        cur.close()


+def init_search_tokens_triggers(cur):
+    # We define only appending triggers, i.e. on INSERT and UPDATE.
+    # It would be far heavier to maintain deletions here, and keeping extra
+    # tokens has no or only a marginal effect on search performance, and no
+    # impact at all on search results.
+    # Instead, a weekly cron job deletes obsolete entries, making sure no
+    # personal data is kept needlessly.
+
+    # First part: the appending function
+    cur.execute(
+        """CREATE OR REPLACE FUNCTION wcs_search_tokens_trigger_fn()
+            RETURNS trigger
+            LANGUAGE plpgsql
+        AS $function$
+        BEGIN
+            INSERT INTO wcs_search_tokens
+                SELECT unnest(tsvector_to_array(NEW.fts))
+                ON CONFLICT(token) DO NOTHING;
+            RETURN NEW;
+        END;
+        $function$;"""
+    )
+
+    # Second part: insert and update triggers
+    cur.execute(
+        "CREATE TRIGGER wcs_all_forms_fts_trg_ins AFTER INSERT ON wcs_all_forms"
+        " FOR EACH ROW WHEN (NEW.fts IS NOT NULL)"
+        " EXECUTE PROCEDURE wcs_search_tokens_trigger_fn();"
+    )
+    cur.execute(
+        "CREATE TRIGGER wcs_all_forms_fts_trg_upd AFTER UPDATE OF fts ON wcs_all_forms"
+        " FOR EACH ROW WHEN (NEW.fts IS NOT NULL)"
+        " EXECUTE PROCEDURE wcs_search_tokens_trigger_fn();"
+    )


+def init_search_tokens_data(cur):
+    cur.execute(
+        "INSERT INTO wcs_search_tokens"
+        " SELECT unnest(tsvector_to_array(fts)) FROM wcs_all_forms"
+        " ON CONFLICT(token) DO NOTHING;"
+    )


+def purge_obsolete_search_tokens(cur=None):
+    own_cur = False
+    if cur is None:
+        own_cur = True
+        conn, cur = get_connection_and_cursor()
+    cur.execute(
+        "DELETE FROM wcs_search_tokens"
+        " WHERE token NOT IN (SELECT unnest(tsvector_to_array(fts)) FROM wcs_all_forms);"
+    )
+    if own_cur:
+        cur.close()


 class SqlMixin:
     _table_name = None
     _numerical_id = True
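
Note: wcs_tsquery() is the core of the change. For each whitespace-separated word of the
query it gathers, through the pg_trgm similarity operator %, the known tokens from
wcs_search_tokens, ORs their tsqueries together (sorted by the <-> trigram distance),
falls back to plainto_tsquery(word) when no token is similar, and ANDs the per-word
results. A sketch of the intended effect; the token and the output are assumptions for
illustration, not taken from the patch:

    # assuming 'certificat' made it into wcs_search_tokens via the triggers above
    cur.execute("SELECT wcs_tsquery(%s);", ('certifcat',))  # note the typo
    print(cur.fetchone()[0])  # expected to yield a tsquery matching 'certificat'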
@@ -1751,7 +1833,7 @@ class SqlMixin:
         sql_statement = (
             '''SELECT id FROM %s
-                WHERE fts @@ plainto_tsquery(%%(value)s)'''
+                WHERE fts @@ wcs_tsquery(%%(value)s)'''
             % cls._table_name
         )
         cur.execute(sql_statement, {'value': FtsMatch.get_fts_value(query)})
@@ -2217,7 +2299,7 @@ class SqlMixin:
             except IndexError:
                 pass
             else:
-                sql_statement += ' ORDER BY ts_rank(fts, plainto_tsquery(%%(c%s)s)) DESC' % id(fts.value)
+                sql_statement += ' ORDER BY ts_rank(fts, wcs_tsquery(%%(c%s)s)) DESC' % id(fts.value)
         else:
             sql_statement += cls.get_order_by_clause(order_by)
         cur.execute(sql_statement, parameters)
@@ -4835,7 +4917,7 @@ class SearchableFormDef(SqlMixin):
     def search(cls, obj_type, string):
         _, cur = get_connection_and_cursor()
         cur.execute(
-            'SELECT object_id FROM searchable_formdefs WHERE fts @@ plainto_tsquery(%s)',
+            'SELECT object_id FROM searchable_formdefs WHERE fts @@ wcs_tsquery(%s)',
             (FtsMatch.get_fts_value(string),),
         )
         ids = [x[0] for x in cur.fetchall()]
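
Note: together with the two hunks above, every FTS call site (wcs_all_forms lookups,
ts_rank ordering, and the searchable_formdefs index) now goes through wcs_tsquery(), so
callers gain typo tolerance without any API change. A hypothetical call, with arguments
made up for illustration:

    SearchableFormDef.search('formdef', 'certifcat')  # can now match 'certificat' despite the typo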
@@ -5100,7 +5182,7 @@ def get_period_total(
 # latest migration, number + description (description is not used
 # programmatically but will make sure git conflicts if two migrations are
 # separately added with the same number)
-SQL_LEVEL = (105, 'change test result json structure')
+SQL_LEVEL = (106, 'improved fts method')


 def migrate_global_views(conn, cur):
@@ -5433,6 +5515,10 @@ def migrate():
         for formdef in FormDef.select() + CardDef.select():
             do_formdef_tables(formdef, rebuild_views=False, rebuild_global_views=False)
+    if sql_level < 106:
+        # 106: new fts mechanism with tokens table
+        init_search_tokens()
+
     if sql_level != SQL_LEVEL[0]:
         cur.execute(
             '''UPDATE wcs_meta SET value = %s, updated_at=NOW() WHERE key = %s''',

View File

@@ -370,7 +370,7 @@ class FtsMatch(Criteria):
         return unidecode.unidecode(value)

     def as_sql(self):
-        return 'fts @@ plainto_tsquery(%%(c%s)s)' % id(self.value)
+        return 'fts @@ wcs_tsquery(%%(c%s)s)' % id(self.value)


 class ElementEqual(Criteria):
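
Note: FtsMatch keeps its interface; only the SQL it renders changes. A minimal usage
sketch, assuming the criteria-list style seen in the first file (the status value and
search string are illustrative):

    # drafts matching a possibly misspelled query, ranked by the backend as before
    formdef.data_class().select([Equal('status', 'draft'), FtsMatch('jean dupont')])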