misc: apply double-quote-string-fixer (#80309)

Valentin Deniaud, 2023-08-07 14:14:12 +02:00
parent e72502cb7a
commit 1e2264dd8c
68 changed files with 381 additions and 381 deletions
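
The change is purely mechanical: it comes from running the pre-commit double-quote-string-fixer hook over the tree, which rewrites double-quoted Python string literals as single-quoted ones whenever the swap needs no extra escaping. As a rough illustration only (this is not the hook's actual code; the function name prefer_single_quotes and its simplifications are assumptions made for this sketch), the transformation can be approximated with the standard tokenize module:

    import io
    import tokenize


    def prefer_single_quotes(source):
        # Walk the token stream and swap the surrounding quotes of plain
        # double-quoted string literals. The replacement has the same length,
        # so the recorded token positions stay valid for untokenize().
        tokens = []
        for tok_type, tok_str, start, end, line in tokenize.generate_tokens(io.StringIO(source).readline):
            if (
                tok_type == tokenize.STRING
                and tok_str.startswith('"')
                and not tok_str.startswith('"""')
                and "'" not in tok_str
                and '\\' not in tok_str
            ):
                tok_str = "'" + tok_str[1:-1] + "'"
            tokens.append((tok_type, tok_str, start, end, line))
        return tokenize.untokenize(tokens)


    print(prefer_single_quotes('if __name__ == "__main__":\n    main()\n'), end='')

In this sketch, strings that already contain a single quote or a backslash, as well as triple-quoted strings, are left alone; that is also why a few assertions below (for instance the "syntax error: Could not parse the remainder" one) keep their double quotes in the new version.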

View File

@@ -2,8 +2,8 @@
import os
import sys
-if __name__ == "__main__":
+if __name__ == '__main__':
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wcs.settings")
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'wcs.settings')
from django.core.management import execute_from_command_line

View File

@@ -171,9 +171,9 @@ cmdclass = {
setup(
name='wcs',
version=get_version(),
-maintainer="Frederic Peters",
+maintainer='Frederic Peters',
-maintainer_email="fpeters@entrouvert.com",
+maintainer_email='fpeters@entrouvert.com',
-url="http://wcs.labs.libre-entreprise.org",
+url='http://wcs.labs.libre-entreprise.org',
install_requires=[
'Quixote>=3.0,<3.2',
'django>=3.2',

View File

@@ -1996,7 +1996,7 @@ def test_form_edit_field_advanced(pub):
resp = resp.forms[0].submit('submit')
assert resp.location == 'http://example.net/backoffice/forms/1/fields/#fieldId_2'
resp = resp.follow()
-assert "Are you sure you want to prefill" not in resp.text
+assert 'Are you sure you want to prefill' not in resp.text
def test_form_edit_field_display(pub):

View File

@@ -600,7 +600,7 @@ def test_workflows_copy_status_item(pub):
pub.write_cfg()
resp = app.get('/backoffice/workflows/%s/status/%s/' % (workflow.id, st1.id))
-resp = resp.click(href="items/_commentable/copy")
+resp = resp.click(href='items/_commentable/copy')
resp = resp.form.submit('submit')
assert '<ul><li>Unknown roles: unknown</li></ul>' in resp
@@ -1200,12 +1200,12 @@ def test_workflows_edit_email_action(pub):
# attachments without backoffice fields: python expressions
resp = app.get(item_url)
-assert "Attachments (templates)" in resp.text
+assert 'Attachments (templates)' in resp.text
resp.form['attachments$element0'] = 'form_var_upload_raw'
resp = resp.form.submit('submit')
assert resp.location
resp = app.get(item_url)
-assert "Attachments (templates)" in resp.text
+assert 'Attachments (templates)' in resp.text
assert resp.form['attachments$element0'].value == 'form_var_upload_raw'
sendmail = Workflow.get(workflow.id).get_status(st1.id).items[0]
assert sendmail.attachments == ['form_var_upload_raw']
@@ -1220,8 +1220,8 @@ def test_workflows_edit_email_action(pub):
]
workflow.store()
resp = app.get(item_url)
-assert "Attachments" in resp.text
+assert 'Attachments' in resp.text
-assert "Attachments (templates)" not in resp.text
+assert 'Attachments (templates)' not in resp.text
assert resp.form['attachments$element0$choice'].value == '{{form_var_upload_raw}}'
assert len(resp.form['attachments$element0$choice'].options) == 5
resp = resp.form.submit('attachments$add_element') # add one
@@ -1291,7 +1291,7 @@ def test_workflows_edit_email_action(pub):
workflow.backoffice_fields_formdef.fields = []
workflow.store()
resp = app.get(item_url)
-assert "Attachments (templates)" in resp.text
+assert 'Attachments (templates)' in resp.text
resp = resp.form.submit('submit')
assert resp.location
sendmail = Workflow.get(workflow.id).get_status(st1.id).items[0]
@@ -2695,21 +2695,21 @@ def test_workflows_global_actions_external_workflow_action(pub):
assert 'External workflow (not completed)' in resp.text
resp = app.get('/backoffice/workflows/%s/status/%s/items/1/' % (workflow.id, st.id))
-assert "No workflow with external triggerable global action." in resp.text
+assert 'No workflow with external triggerable global action.' in resp.text
trigger.identifier = 'test'
wf.store()
resp = app.get('/backoffice/workflows/%s/status/%s/items/1/' % (workflow.id, st.id))
resp = resp.forms[0].submit('submit')
-assert "required field" in resp.text
+assert 'required field' in resp.text
resp.forms[0]['slug'] = 'formdef:%s' % formdef.url_name
assert (
resp.pyquery('select#form_slug option')[1].attrib['data-goto-url']
== 'http://example.net/backoffice/forms/1/'
)
resp = resp.forms[0].submit('submit')
-assert "required field" in resp.text
+assert 'required field' in resp.text
resp = resp.forms[0].submit('submit')
resp.forms[0]['trigger_id'] = 'action:%s' % trigger.identifier
resp = resp.forms[0].submit('submit').follow().follow()
@@ -2767,7 +2767,7 @@ def test_workflows_external_workflow_action_config(pub):
resp = resp.forms[0].submit('submit')
assert 'There were errors processing your form. See below for details.' not in resp
assert 'This action is configured in two steps. See below for details.' in resp
-assert "required field" in resp
+assert 'required field' in resp
# multiple errors: do as usual
resp.forms[0]['slug'] = 'formdef:external'
resp.forms[0]['condition$type'] = 'django'
@@ -2775,7 +2775,7 @@ def test_workflows_external_workflow_action_config(pub):
resp = resp.forms[0].submit('submit')
assert 'There were errors processing your form. See below for details.' in resp
assert 'This action is configured in two steps. See below for details.' not in resp
-assert "required field" in resp
+assert 'required field' in resp
assert "syntax error: Could not parse the remainder: '{{' from '{{'" in resp

View File

@@ -1301,7 +1301,7 @@ def test_formdef_submit_structured(pub, local_user):
for post_data in [
# straight id
-{'0': '0', "1": '3'},
+{'0': '0', '1': '3'},
# varnames
{'foobar': '0', 'foobar1': '3'},
# varnames with integer as values
@@ -1318,10 +1318,10 @@ def test_formdef_submit_structured(pub, local_user):
rsps.get(
'http://datasource.com',
json={
-"data": [
+'data': [
-{"id": 0, "text": "zéro", "foo": "bar"},
+{'id': 0, 'text': 'zéro', 'foo': 'bar'},
-{"id": 1, "text": "uné", "foo": "bar1"},
+{'id': 1, 'text': 'uné', 'foo': 'bar1'},
-{"id": 2, "text": "deux", "foo": "bar2"},
+{'id': 2, 'text': 'deux', 'foo': 'bar2'},
]
},
)
@@ -1514,7 +1514,7 @@ def test_cards_filter_function(pub, local_user):
custom_view.title = 'shared carddef custom view'
custom_view.formdef = carddef
custom_view.columns = {'list': [{'id': '0'}]}
-custom_view.filters = {"filter-user-function": "on", "filter-user-function-value": "_foobar"}
+custom_view.filters = {'filter-user-function': 'on', 'filter-user-function-value': '_foobar'}
custom_view.visibility = 'any'
custom_view.store()

View File

@@ -188,7 +188,7 @@ def test_api_list_formdata_custom_view(pub, local_user):
custom_view.title = 'custom view'
custom_view.formdef = formdef
custom_view.columns = {'list': [{'id': '0'}]}
-custom_view.filters = {"filter": "done", "filter-status": "on"}
+custom_view.filters = {'filter': 'done', 'filter-status': 'on'}
custom_view.visibility = 'any'
custom_view.store()
@@ -252,7 +252,7 @@ def test_api_ods_formdata_custom_view(pub, local_user):
custom_view.title = 'custom view'
custom_view.formdef = formdef
custom_view.columns = {'list': [{'id': '0'}]}
-custom_view.filters = {"filter": "done", "filter-status": "on"}
+custom_view.filters = {'filter': 'done', 'filter-status': 'on'}
custom_view.visibility = 'any'
custom_view.store()
@@ -306,7 +306,7 @@ def test_api_geojson_formdata_custom_view(pub, local_user):
custom_view.title = 'custom view'
custom_view.formdef = formdef
custom_view.columns = {'list': [{'id': '0'}]}
-custom_view.filters = {"filter": "done", "filter-status": "on"}
+custom_view.filters = {'filter': 'done', 'filter-status': 'on'}
custom_view.visibility = 'any'
custom_view.store()
@@ -355,7 +355,7 @@ def test_api_get_formdata_custom_view(pub, local_user):
custom_view.title = 'custom view'
custom_view.formdef = formdef
custom_view.columns = {'list': [{'id': '0'}]}
-custom_view.filters = {"filter": "done", "filter-status": "on"}
+custom_view.filters = {'filter': 'done', 'filter-status': 'on'}
custom_view.visibility = 'any'
custom_view.store()

View File

@@ -564,7 +564,7 @@ def test_formdata_edit(pub, local_user):
# bad payload: not a dict, missing data entry
get_app(pub).post_json(
sign_uri('/api/forms/test/%s/' % formdata.id, user=local_user),
-"not a dict",
+'not a dict',
status=400,
)
get_app(pub).post_json(
@@ -2711,14 +2711,14 @@ def test_api_geojson_formdata(pub, local_user):
if field['label'] == 'User Label':
assert field['varname'] == 'user_label'
assert field['value'] == username
-assert field['html_value'] == "&lt;font color=&quot;red&quot;&gt;Jean Darmette&lt;/font&gt;"
+assert field['html_value'] == '&lt;font color=&quot;red&quot;&gt;Jean Darmette&lt;/font&gt;'
if field['label'] == 'foobar':
assert field['varname'] == 'foobar'
assert field['value'] == foobar
-assert field['html_value'] == "&lt;font color=&quot;red&quot;&gt;FOO BAR&lt;/font&gt;"
+assert field['html_value'] == '&lt;font color=&quot;red&quot;&gt;FOO BAR&lt;/font&gt;'
if field['label'] == 'foobar1':
assert field['varname'] is None
-assert field['value'] == "test.txt"
+assert field['value'] == 'test.txt'
assert field['html_value'] == (
'<div class="file-field"><a download="test.txt" href="http://example.net/backoffice/management/test/28/download?f=1">'
'<span>test.txt</span></a></div>'

View File

@@ -925,10 +925,10 @@ def test_formdef_submit_structured(pub, local_user):
with responses.RequestsMock() as rsps:
json_data = {
-"data": [
+'data': [
-{"id": 0, "text": "zéro", "foo": "bar"},
+{'id': 0, 'text': 'zéro', 'foo': 'bar'},
-{"id": 1, "text": "uné", "foo": "bar1"},
+{'id': 1, 'text': 'uné', 'foo': 'bar1'},
-{"id": 2, "text": "deux", "foo": "bar2"},
+{'id': 2, 'text': 'deux', 'foo': 'bar2'},
]
}
rsps.get('http://datasource.com', json=json_data)
@@ -1020,9 +1020,9 @@ def test_formdef_submit_structured_with_block_field(pub, local_user):
with responses.RequestsMock() as rsps:
json_data = {
-"data": [
+'data': [
-{"id": 0, "text": "zéro", "foo": "bar"},
+{'id': 0, 'text': 'zéro', 'foo': 'bar'},
-{"id": 2, "text": "deux", "foo": "bar2"},
+{'id': 2, 'text': 'deux', 'foo': 'bar2'},
]
}
rsps.get('http://datasource.com', json=json_data)

View File

@@ -996,11 +996,11 @@ def test_backoffice_multi_actions_oldest_form(pub):
workflow.id = '2'
action = workflow.add_global_action('Mark as duplicates')
jump = action.add_action('jump')
-jump.condition = {'type': 'django', 'value': "mass_action_index != 0"}
+jump.condition = {'type': 'django', 'value': 'mass_action_index != 0'}
jump.status = 'rejected'
jump2 = action.add_action('jump')
-jump2.condition = {'type': 'django', 'value': "mass_action_index == 0"}
+jump2.condition = {'type': 'django', 'value': 'mass_action_index == 0'}
jump2.status = 'accepted'
register_comment = workflow.possible_status[2].add_action('register-comment', id='_comment')
@@ -3496,7 +3496,7 @@ def test_count_open(pub):
# check the callback parameter is ignored, that we still get the default
# criterias when it's set.
resp = login(get_app(pub)).get('/backoffice/management/count?callback=toto')
-assert "20" in resp.text
+assert '20' in resp.text
def test_count_backoffice_drafts(pub):

View File

@@ -429,7 +429,7 @@ def test_backoffice_cards_import_data_from_csv(pub):
'"value",'
'"id1|id2|...",'
'"value"'
-"\r\n" % (pub.get_default_position(), today)
+'\r\n' % (pub.get_default_position(), today)
)
# missing file

View File

@@ -411,10 +411,10 @@ def test_backoffice_item_filter(pub):
resp.forms['listing-settings']['filter-4-operator'].value = 'between'
for value in [
card_ids['baz'],
-"%s|%s|%s" % (card_ids['baz'], card_ids['bar'], card_ids['foo']),
+'%s|%s|%s' % (card_ids['baz'], card_ids['bar'], card_ids['foo']),
-"|",
+'|',
-"%s|" % card_ids['baz'],
+'%s|' % card_ids['baz'],
-"|%s" % card_ids['baz'],
+'|%s' % card_ids['baz'],
]:
resp.forms['listing-settings']['filter-4-value'].force_value(value)
resp = resp.forms['listing-settings'].submit()

View File

@@ -248,7 +248,7 @@ def test_inspect_page(pub, local_user):
# test tools
resp = app.get('%sinspect' % formdata.get_url(backoffice=True), status=200)
-assert "Test tool" in resp.text
+assert 'Test tool' in resp.text
resp.form['test_mode'] = 'python-condition'

View File

@@ -655,7 +655,7 @@ def test_backoffice_submission_drafts(pub):
assert resp.form['f1'].value == 'test submission'
resp = resp.form.submit('submit')
-assert "Check values then click submit." in resp.text
+assert 'Check values then click submit.' in resp.text
resp = resp.form.submit('submit')
# check it kept the same id
assert resp.location == 'http://example.net/backoffice/management/form-title/%s/' % formdata_no

View File

@@ -1089,7 +1089,7 @@ def test_form_multi_page_condition_on_first_page(pub):
resp = get_app(pub).get('/test/')
formdef.data_class().wipe()
# should be on second page already
-assert resp.pyquery(".buttons button.form-previous[hidden][disabled]")
+assert resp.pyquery('.buttons button.form-previous[hidden][disabled]')
resp.form['f3'] = 'foo'
assert_current_page(resp, '2nd page')
resp = resp.form.submit('submit') # -> 3rd page
@@ -1102,7 +1102,7 @@ def test_form_multi_page_condition_on_first_page(pub):
resp = resp.form.submit('previous') # -> 2nd page
assert_current_page(resp, '2nd page')
assert resp.form['f3']
-assert resp.pyquery(".buttons button.form-previous[hidden][disabled]")
+assert resp.pyquery('.buttons button.form-previous[hidden][disabled]')
def test_form_multi_page_condition_on_first_and_next(pub):
@@ -9070,10 +9070,10 @@ def test_form_item_timetable_data_source(pub, http_requests):
with responses.RequestsMock() as rsps:
data = {
-"data": [
+'data': [
-{"id": "1", "datetime": "2021-01-12 10:00:00", "text": "event 1"},
+{'id': '1', 'datetime': '2021-01-12 10:00:00', 'text': 'event 1'},
-{"id": "2", "datetime": "2021-01-13 10:20:00", "text": "event 2"},
+{'id': '2', 'datetime': '2021-01-13 10:20:00', 'text': 'event 2'},
-{"id": "3", "datetime": "2021-01-14 10:40:00", "text": "event 3"},
+{'id': '3', 'datetime': '2021-01-14 10:40:00', 'text': 'event 3'},
]
}
rsps.get('http://remote.example.net/api/datetimes', json=data)
@@ -9124,10 +9124,10 @@ def test_form_item_timetable_data_source_with_date_alignment(pub, http_requests)
with responses.RequestsMock() as rsps:
data = {
-"data": [
+'data': [
-{"id": "1", "datetime": "2021-01-12 10:00:00", "text": "event 1"},
+{'id': '1', 'datetime': '2021-01-12 10:00:00', 'text': 'event 1'},
-{"id": "2", "datetime": "2021-01-13 10:20:00", "text": "event 2"},
+{'id': '2', 'datetime': '2021-01-13 10:20:00', 'text': 'event 2'},
-{"id": "3", "datetime": "2021-01-14 10:40:00", "text": "event 3"},
+{'id': '3', 'datetime': '2021-01-14 10:40:00', 'text': 'event 3'},
]
}
rsps.get('http://remote.example.net/api/datetimes', json=data)

View File

@@ -837,7 +837,7 @@ def test_formdata_generated_document_odt_to_pdf_download_push_to_portfolio(
resp = login(get_app(pub), username='foo', password='foo').get(form_location)
with responses.RequestsMock() as rsps:
rsps.post(
-'http://fargo.example.net/api/documents/push/', status=400, json={"code": "document-exists"}
+'http://fargo.example.net/api/documents/push/', status=400, json={'code': 'document-exists'}
)
resp = resp.form.submit('button_export_to')
assert len(rsps.calls) == 1

View File

@@ -796,15 +796,15 @@ def test_field_live_timetable_select(pub, http_requests):
with responses.RequestsMock() as rsps:
data = {
-"data": [
+'data': [
-{"id": "1", "datetime": "2021-01-12 10:00:00", "text": "event 1", "api": {}},
+{'id': '1', 'datetime': '2021-01-12 10:00:00', 'text': 'event 1', 'api': {}},
-{"id": "2", "datetime": "2021-01-13 10:20:00", "text": "event 2", "api": {}},
+{'id': '2', 'datetime': '2021-01-13 10:20:00', 'text': 'event 2', 'api': {}},
{
-"id": "3",
+'id': '3',
-"datetime": "2021-01-14 10:40:00",
+'datetime': '2021-01-14 10:40:00',
-"text": "event 3",
+'text': 'event 3',
-"api": {},
+'api': {},
-"disabled": True,
+'disabled': True,
},
]
}
@@ -2170,7 +2170,7 @@ def test_comment_from_card_field(pub):
for i, value in enumerate(['foo', 'bar']):
carddata = carddef.data_class()()
carddata.data = {
-'0': "%s {{ form_var_foo }}" % value,
+'0': '%s {{ form_var_foo }}' % value,
}
carddata.just_created()
carddata.store()

View File

@@ -265,7 +265,7 @@ def test_item_field_jsonvalue_datasource(requests_pub):
def test_jsonvalue_datasource(pub):
-plain_list = [{"id": "1", "text": "foo"}, {"id": "2", "text": "bar"}]
+plain_list = [{'id': '1', 'text': 'foo'}, {'id': '2', 'text': 'bar'}]
datasource = {'type': 'jsonvalue', 'value': json.dumps(plain_list)}
assert data_sources.get_items(datasource) == [
('1', 'foo', '1', {'id': '1', 'text': 'foo'}),
@@ -277,7 +277,7 @@ def test_jsonvalue_datasource(pub):
]
# with key
-plain_list = [{"id": "1", "text": "foo", "key": "a"}, {"id": "2", "text": "bar", "key": "b"}]
+plain_list = [{'id': '1', 'text': 'foo', 'key': 'a'}, {'id': '2', 'text': 'bar', 'key': 'b'}]
datasource = {'type': 'jsonvalue', 'value': json.dumps(plain_list)}
assert data_sources.get_items(datasource) == [
('1', 'foo', 'a', {'id': '1', 'key': 'a', 'text': 'foo'}),
@@ -308,7 +308,7 @@ def test_jsonvalue_datasource_errors(pub):
# not a list of dict
pub.loggederror_class.wipe()
-plain_list = ["foobar"]
+plain_list = ['foobar']
datasource = {'type': 'jsonvalue', 'value': json.dumps(plain_list), 'record_on_errors': True}
assert data_sources.get_items(datasource) == []
assert pub.loggederror_class.count() == 1
@@ -317,7 +317,7 @@ def test_jsonvalue_datasource_errors(pub):
assert logged_error.summary == "[DATASOURCE] JSON data source ('[\"foobar\"]') gave a non usable result"
pub.loggederror_class.wipe()
-plain_list = [{'foo': 'bar'}, "foobar"]
+plain_list = [{'foo': 'bar'}, 'foobar']
datasource = {'type': 'jsonvalue', 'value': json.dumps(plain_list), 'record_on_errors': True}
assert data_sources.get_items(datasource) == []
assert pub.loggederror_class.count() == 1
@@ -330,7 +330,7 @@ def test_jsonvalue_datasource_errors(pub):
# no id found
pub.loggederror_class.wipe()
-plain_list = [{"text": "foo"}, {"id": "2", "text": "bar"}]
+plain_list = [{'text': 'foo'}, {'id': '2', 'text': 'bar'}]
datasource = {'type': 'jsonvalue', 'value': json.dumps(plain_list), 'record_on_errors': True}
assert data_sources.get_items(datasource) == []
assert pub.loggederror_class.count() == 1
@@ -342,7 +342,7 @@ def test_jsonvalue_datasource_errors(pub):
)
pub.loggederror_class.wipe()
-plain_list = [{"id": "1", "text": "foo"}, {"id": "", "text": "bar"}]
+plain_list = [{'id': '1', 'text': 'foo'}, {'id': '', 'text': 'bar'}]
datasource = {'type': 'jsonvalue', 'value': json.dumps(plain_list), 'record_on_errors': True}
assert data_sources.get_items(datasource) == []
assert pub.loggederror_class.count() == 1
@@ -355,7 +355,7 @@ def test_jsonvalue_datasource_errors(pub):
# no text found
pub.loggederror_class.wipe()
-plain_list = [{"id": "1"}, {"id": "2", "text": "bar"}]
+plain_list = [{'id': '1'}, {'id': '2', 'text': 'bar'}]
datasource = {'type': 'jsonvalue', 'value': json.dumps(plain_list), 'record_on_errors': True}
assert data_sources.get_items(datasource) == []
assert pub.loggederror_class.count() == 1
@@ -367,7 +367,7 @@ def test_jsonvalue_datasource_errors(pub):
)
pub.loggederror_class.wipe()
-plain_list = [{"id": "1", "text": "foo"}, {"id": "2", "text": ""}]
+plain_list = [{'id': '1', 'text': 'foo'}, {'id': '2', 'text': ''}]
datasource = {'type': 'jsonvalue', 'value': json.dumps(plain_list), 'record_on_errors': True}
assert data_sources.get_items(datasource) == []
assert pub.loggederror_class.count() == 1
@@ -384,7 +384,7 @@ def test_jsonvalue_datasource_errors(pub):
assert data_sources.get_items(datasource) == []
assert pub.loggederror_class.count() == 1
logged_error = pub.loggederror_class.select()[0]
-assert logged_error.summary == "[DATASOURCE] JSON data source (None) gave a non usable result"
+assert logged_error.summary == '[DATASOURCE] JSON data source (None) gave a non usable result'
def test_json_datasource(pub, requests_pub):
@@ -688,8 +688,8 @@ def test_json_datasource_bad_url(pub, error_email, http_requests, emails):
logged_error = pub.loggederror_class.select()[0]
assert logged_error.workflow_id is None
assert (
-logged_error.summary == "[DATASOURCE] Error loading JSON data source "
+logged_error.summary == '[DATASOURCE] Error loading JSON data source '
-"(error in HTTP request to http://remote.example.net/404 (status: 404))"
+'(error in HTTP request to http://remote.example.net/404 (status: 404))'
)
datasource = {
@@ -706,7 +706,7 @@ def test_json_datasource_bad_url(pub, error_email, http_requests, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
-== "[DATASOURCE] Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))"
+== '[DATASOURCE] Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))'
)
datasource = {
@@ -720,7 +720,7 @@ def test_json_datasource_bad_url(pub, error_email, http_requests, emails):
assert pub.loggederror_class.count() == 3
logged_error = pub.loggederror_class.select(order_by='id')[2]
assert logged_error.workflow_id is None
-assert logged_error.summary.startswith("[DATASOURCE] Error loading JSON data source (error")
+assert logged_error.summary.startswith('[DATASOURCE] Error loading JSON data source (error')
datasource = {
'type': 'json',
@@ -733,7 +733,7 @@ def test_json_datasource_bad_url(pub, error_email, http_requests, emails):
assert pub.loggederror_class.count() == 4
logged_error = pub.loggederror_class.select(order_by='id')[3]
assert logged_error.workflow_id is None
-assert logged_error.summary == "[DATASOURCE] Error reading JSON data source output (err 1)"
+assert logged_error.summary == '[DATASOURCE] Error reading JSON data source output (err 1)'
def test_json_datasource_bad_url_scheme(pub, error_email, emails):
@@ -751,7 +751,7 @@ def test_json_datasource_bad_url_scheme(pub, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
-== "[DATASOURCE] Error loading JSON data source (invalid scheme in URL foo://bar)"
+== '[DATASOURCE] Error loading JSON data source (invalid scheme in URL foo://bar)'
)
datasource = {'type': 'json', 'value': '/bla/blo', 'notify_on_errors': True, 'record_on_errors': True}
@@ -762,7 +762,7 @@ def test_json_datasource_bad_url_scheme(pub, error_email, emails):
logged_error = pub.loggederror_class.select(order_by='id')[1]
assert logged_error.workflow_id is None
assert (
-logged_error.summary == "[DATASOURCE] Error loading JSON data source (invalid scheme in URL /bla/blo)"
+logged_error.summary == '[DATASOURCE] Error loading JSON data source (invalid scheme in URL /bla/blo)'
)
@@ -771,7 +771,7 @@ def test_json_datasource_bad_url_scheme(pub, error_email, emails):
def test_json_datasource_bad_qs_data(pub, error_email, emails, notify, record):
datasource = {
'type': 'json',
-'value': "https://whatever.com/json",
+'value': 'https://whatever.com/json',
'qs_data': {'foo': '{% for invalid %}', 'bar': '{{ valid }}'},
'notify_on_errors': notify,
'record_on_errors': record,
@@ -1121,8 +1121,8 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
logged_error = pub.loggederror_class.select()[0]
assert logged_error.workflow_id is None
assert (
-logged_error.summary == "[DATASOURCE] Error loading JSON data source "
+logged_error.summary == '[DATASOURCE] Error loading JSON data source '
-"(error in HTTP request to http://remote.example.net/404 (status: 404))"
+'(error in HTTP request to http://remote.example.net/404 (status: 404))'
)
datasource = {
@@ -1139,7 +1139,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
-== "[DATASOURCE] Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))"
+== '[DATASOURCE] Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))'
)
datasource = {
@@ -1154,7 +1154,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert pub.loggederror_class.count() == 3
logged_error = pub.loggederror_class.select(order_by='id')[2]
assert logged_error.workflow_id is None
-assert logged_error.summary.startswith("[DATASOURCE] Error loading JSON data source (error")
+assert logged_error.summary.startswith('[DATASOURCE] Error loading JSON data source (error')
datasource = {
'type': 'geojson',
@@ -1167,7 +1167,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert pub.loggederror_class.count() == 4
logged_error = pub.loggederror_class.select(order_by='id')[3]
assert logged_error.workflow_id is None
-assert logged_error.summary == "[DATASOURCE] Error reading JSON data source output (err 1)"
+assert logged_error.summary == '[DATASOURCE] Error reading JSON data source output (err 1)'
datasource = {
'type': 'geojson',
@@ -1180,7 +1180,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert pub.loggederror_class.count() == 5
logged_error = pub.loggederror_class.select(order_by='id')[4]
assert logged_error.workflow_id is None
-assert logged_error.summary == "[DATASOURCE] Error reading JSON data source output (err_desc :()"
+assert logged_error.summary == '[DATASOURCE] Error reading JSON data source output (err_desc :()'
datasource = {
'type': 'geojson',
@@ -1198,7 +1198,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
-== "[DATASOURCE] Error reading JSON data source output (err_desc :(, err_class foo_bar, err bug)"
+== '[DATASOURCE] Error reading JSON data source output (err_desc :(, err_class foo_bar, err bug)'
)
@@ -1216,7 +1216,7 @@ def test_geojson_datasource_bad_url_scheme(pub, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
-== "[DATASOURCE] Error loading JSON data source (invalid scheme in URL foo://bar)"
+== '[DATASOURCE] Error loading JSON data source (invalid scheme in URL foo://bar)'
)
datasource = {'type': 'geojson', 'value': '/bla/blo', 'notify_on_errors': True, 'record_on_errors': True}
@@ -1227,7 +1227,7 @@ def test_geojson_datasource_bad_url_scheme(pub, error_email, emails):
logged_error = pub.loggederror_class.select(order_by='id')[1]
assert logged_error.workflow_id is None
assert (
-logged_error.summary == "[DATASOURCE] Error loading JSON data source (invalid scheme in URL /bla/blo)"
+logged_error.summary == '[DATASOURCE] Error loading JSON data source (invalid scheme in URL /bla/blo)'
)
@@ -1335,7 +1335,7 @@ def test_data_source_unicode(pub):
NamedDataSource.wipe()
data_source = NamedDataSource(name='foobar')
-data_source.data_source = {'type': 'json', 'value': "https://whatever.com/json"}
+data_source.data_source = {'type': 'json', 'value': 'https://whatever.com/json'}
data_source.store()
data_source2 = NamedDataSource.select()[0]
@@ -1343,12 +1343,12 @@ def test_data_source_unicode(pub):
with responses.RequestsMock() as rsps:
rsps.get(
'https://whatever.com/json',
-json={"data": [{"id": 0, "text": "zéro"}, {"id": 1, "text": "uné"}, {"id": 2, "text": "deux"}]},
+json={'data': [{'id': 0, 'text': 'zéro'}, {'id': 1, 'text': 'uné'}, {'id': 2, 'text': 'deux'}]},
)
assert data_sources.get_items({'type': 'foobar'}) == [
-('0', 'zéro', '0', {"id": 0, "text": "zéro"}),
+('0', 'zéro', '0', {'id': 0, 'text': 'zéro'}),
-('1', 'uné', '1', {"id": 1, "text": "uné"}),
+('1', 'uné', '1', {'id': 1, 'text': 'uné'}),
-('2', 'deux', '2', {"id": 2, "text": "deux"}),
+('2', 'deux', '2', {'id': 2, 'text': 'deux'}),
]
@@ -1356,12 +1356,12 @@ def test_data_source_unicode(pub):
def test_data_source_signed(no_request_pub, qs_data):
NamedDataSource.wipe()
data_source = NamedDataSource(name='foobar')
-data_source.data_source = {'type': 'json', 'value': "https://api.example.com/json"}
+data_source.data_source = {'type': 'json', 'value': 'https://api.example.com/json'}
data_source.qs_data = qs_data
data_source.store()
with responses.RequestsMock() as rsps:
-rsps.get('https://api.example.com/json', json={"data": [{"id": 0, "text": "zero"}]})
+rsps.get('https://api.example.com/json', json={'data': [{'id': 0, 'text': 'zero'}]})
assert len(data_sources.get_items({'type': 'foobar'})) == 1
signed_url = rsps.calls[-1].request.url
assert signed_url.startswith('https://api.example.com/json?')
@@ -1377,10 +1377,10 @@ def test_data_source_signed(no_request_pub, qs_data):
assert querystring['arg1'][0] == 'val1'
assert querystring['arg2'][0] == 'val2'
-data_source.data_source = {'type': 'json', 'value': "https://api.example.com/json?foo=bar"}
+data_source.data_source = {'type': 'json', 'value': 'https://api.example.com/json?foo=bar'}
data_source.store()
with responses.RequestsMock() as rsps:
-rsps.get('https://api.example.com/json', json={"data": [{"id": 0, "text": "zero"}]})
+rsps.get('https://api.example.com/json', json={'data': [{'id': 0, 'text': 'zero'}]})
assert len(data_sources.get_items({'type': 'foobar'})) == 1
signed_url = rsps.calls[-1].request.url
assert signed_url.startswith('https://api.example.com/json?')
@@ -1396,10 +1396,10 @@ def test_data_source_signed(no_request_pub, qs_data):
assert querystring['arg1'][0] == 'val1'
assert querystring['arg2'][0] == 'val2'
-data_source.data_source = {'type': 'json', 'value': "https://no-secret.example.com/json"}
+data_source.data_source = {'type': 'json', 'value': 'https://no-secret.example.com/json'}
data_source.store()
with responses.RequestsMock() as rsps:
-rsps.get('https://no-secret.example.com/json', json={"data": [{"id": 0, "text": "zero"}]})
+rsps.get('https://no-secret.example.com/json', json={'data': [{'id': 0, 'text': 'zero'}]})
assert len(data_sources.get_items({'type': 'foobar'})) == 1
unsigned_url = rsps.calls[-1].request.url
if qs_data:
@@ -1407,10 +1407,10 @@ def test_data_source_signed(no_request_pub, qs_data):
else:
assert unsigned_url == 'https://no-secret.example.com/json'
-data_source.data_source = {'type': 'json', 'value': "https://no-secret.example.com/json?foo=bar"}
+data_source.data_source = {'type': 'json', 'value': 'https://no-secret.example.com/json?foo=bar'}
data_source.store()
with responses.RequestsMock() as rsps:
-rsps.get('https://no-secret.example.com/json', json={"data": [{"id": 0, "text": "zero"}]})
+rsps.get('https://no-secret.example.com/json', json={'data': [{'id': 0, 'text': 'zero'}]})
assert len(data_sources.get_items({'type': 'foobar'})) == 1
unsigned_url = rsps.calls[-1].request.url
if qs_data:
@@ -1583,7 +1583,7 @@ def test_data_source_in_template(pub):
pub.substitutions.feed(formdata)
data_source = NamedDataSource(name='foobar')
-data_source.data_source = {'type': 'json', 'value': "https://example.invalid/json?t={{form_var_foo}}"}
+data_source.data_source = {'type': 'json', 'value': 'https://example.invalid/json?t={{form_var_foo}}'}
data_source.store()
with pub.complex_data():
@@ -1591,10 +1591,10 @@ def test_data_source_in_template(pub):
rsps.get(
'https://example.invalid/json',
json={
-"data": [{"id": 0, "text": "zéro"}, {"id": 1, "text": "uné"}, {"id": 2, "text": "deux"}],
+'data': [{'id': 0, 'text': 'zéro'}, {'id': 1, 'text': 'uné'}, {'id': 2, 'text': 'deux'}],
-"meta": {
+'meta': {
-"foo": "bar",
+'foo': 'bar',
-"blah": {"a": "b", "c": "d"},
+'blah': {'a': 'b', 'c': 'd'},
},
},
)

View File

@@ -30,38 +30,38 @@ def pub(request):
AGENDA_EVENTS_DATA = [
{
-"api": {
+'api': {
-"datetimes_url": "http://chrono.example.net/api/agenda/events-A/datetimes/",
+'datetimes_url': 'http://chrono.example.net/api/agenda/events-A/datetimes/',
},
-"id": "events-A",
+'id': 'events-A',
-"kind": "events",
+'kind': 'events',
-"text": "Events A",
+'text': 'Events A',
},
{
-"api": {
+'api': {
-"datetimes_url": "http://chrono.example.net/api/agenda/events-B/datetimes/",
+'datetimes_url': 'http://chrono.example.net/api/agenda/events-B/datetimes/',
},
-"id": "events-B",
+'id': 'events-B',
-"kind": "events",
+'kind': 'events',
-"text": "Events B",
+'text': 'Events B',
},
]
AGENDA_MEETINGS_DATA = [
{
-"api": {"meetings_url": "http://chrono.example.net/api/agenda/meetings-A/meetings/"},
+'api': {'meetings_url': 'http://chrono.example.net/api/agenda/meetings-A/meetings/'},
-"id": "meetings-A",
+'id': 'meetings-A',
-"kind": "meetings",
+'kind': 'meetings',
-"text": "Meetings A",
+'text': 'Meetings A',
},
{
-"api": {
+'api': {
-"meetings_url": "http://chrono.example.net/api/agenda/virtual-B/meetings/",
+'meetings_url': 'http://chrono.example.net/api/agenda/virtual-B/meetings/',
},
-"id": "virtual-B",
+'id': 'virtual-B',
-"kind": "virtual",
+'kind': 'virtual',
-"text": "Virtual B",
+'text': 'Virtual B',
},
]
@@ -69,30 +69,30 @@ AGENDA_MEETINGS_DATA = [
AGENDA_MEETING_TYPES_DATA = {
'meetings-A': [
{
-"api": {
+'api': {
-"datetimes_url": "http://chrono.example.net/api/agenda/meetings-A/meetings/mt-1/datetimes/"
+'datetimes_url': 'http://chrono.example.net/api/agenda/meetings-A/meetings/mt-1/datetimes/'
},
-"id": "mt-1",
+'id': 'mt-1',
-"text": "MT 1",
+'text': 'MT 1',
-"duration": 30,
+'duration': 30,
},
{
-"api": {
+'api': {
-"datetimes_url": "http://chrono.example.net/api/agenda/meetings-A/meetings/mt-2/datetimes/"
+'datetimes_url': 'http://chrono.example.net/api/agenda/meetings-A/meetings/mt-2/datetimes/'
},
-"id": "mt-2",
+'id': 'mt-2',
-"text": "MT 2",
+'text': 'MT 2',
-"duration": 60,
+'duration': 60,
},
],
'virtual-B': [
{
-"api": {
+'api': {
-"datetimes_url": "http://chrono.example.net/api/agenda/virtual-B/meetings/mt-3/datetimes/"
+'datetimes_url': 'http://chrono.example.net/api/agenda/virtual-B/meetings/mt-3/datetimes/'
},
-"id": "mt-3",
+'id': 'mt-3',
-"text": "MT 3",
+'text': 'MT 3',
-"duration": 60,
+'duration': 60,
},
],
}
@@ -124,7 +124,7 @@ def test_collect_agenda_data(pub, chrono_url):
pub.load_site_options()
NamedDataSource.wipe()
-responses.get('http://chrono.example.net/api/agenda/', json={"data": []})
+responses.get('http://chrono.example.net/api/agenda/', json={'data': []})
assert collect_agenda_data(pub) == []
assert len(responses.calls) == 1
assert responses.calls[-1].request.url == 'http://chrono.example.net/api/agenda/'
@@ -137,7 +137,7 @@ def test_collect_agenda_data(pub, chrono_url):
# events agenda
responses.reset()
-responses.get('http://chrono.example.net/api/agenda/', json={"data": AGENDA_EVENTS_DATA})
+responses.get('http://chrono.example.net/api/agenda/', json={'data': AGENDA_EVENTS_DATA})
assert collect_agenda_data(pub) == [
{
@@ -156,14 +156,14 @@ def test_collect_agenda_data(pub, chrono_url):
# meetings agenda
responses.reset()
-responses.get('http://chrono.example.net/api/agenda/', json={"data": AGENDA_MEETINGS_DATA})
+responses.get('http://chrono.example.net/api/agenda/', json={'data': AGENDA_MEETINGS_DATA})
responses.get(
'http://chrono.example.net/api/agenda/meetings-A/meetings/',
-json={"data": AGENDA_MEETING_TYPES_DATA['meetings-A']},
+json={'data': AGENDA_MEETING_TYPES_DATA['meetings-A']},
)
responses.get(
'http://chrono.example.net/api/agenda/virtual-B/meetings/',
-json={"data": AGENDA_MEETING_TYPES_DATA['virtual-B']},
+json={'data': AGENDA_MEETING_TYPES_DATA['virtual-B']},
)
assert collect_agenda_data(pub) == [
@@ -209,10 +209,10 @@ def test_collect_agenda_data(pub, chrono_url):
assert responses.calls[2].request.url == 'http://chrono.example.net/api/agenda/virtual-B/meetings/'
# if meeting types could not be collected
responses.reset()
-responses.get('http://chrono.example.net/api/agenda/', json={"data": AGENDA_MEETINGS_DATA})
+responses.get('http://chrono.example.net/api/agenda/', json={'data': AGENDA_MEETINGS_DATA})
responses.get(
'http://chrono.example.net/api/agenda/meetings-A/meetings/',
-json={"data": AGENDA_MEETING_TYPES_DATA['meetings-A']},
+json={'data': AGENDA_MEETING_TYPES_DATA['meetings-A']},
)
responses.get('http://chrono.example.net/api/agenda/virtual-B/meetings/', body=ConnectionError('...'))
@@ -223,7 +223,7 @@ def test_collect_agenda_data(pub, chrono_url):
assert responses.calls[2].request.url == 'http://chrono.example.net/api/agenda/virtual-B/meetings/'
responses.reset()
-responses.get('http://chrono.example.net/api/agenda/', json={"data": AGENDA_MEETINGS_DATA})
+responses.get('http://chrono.example.net/api/agenda/', json={'data': AGENDA_MEETINGS_DATA})
responses.get('http://chrono.example.net/api/agenda/meetings-A/meetings/', body=ConnectionError('...'))
assert collect_agenda_data(pub) is None

View File

@@ -494,9 +494,9 @@ def test_get_json_export_dict_evolution(pub, local_user):
evo.who = '_submitter'
d.evolution = [evo]
d.store()
-evo.add_part(JournalEvolutionPart(d, "ok", None))
+evo.add_part(JournalEvolutionPart(d, 'ok', None))
-evo.add_part(JournalWsCallErrorPart("summary", "label", "data"))
+evo.add_part(JournalWsCallErrorPart('summary', 'label', 'data'))
-evo.add_part(JournalAssignationErrorPart("summary", "label"))
+evo.add_part(JournalAssignationErrorPart('summary', 'label'))
evo = Evolution()
evo.time = time.localtime()
evo.status = 'wf-%s' % st_finished.id

View File

@@ -127,7 +127,7 @@ def test_process_notification_role(pub):
assert old_role.name == 'Service état civil'
assert old_role.uuid == uuid2
assert old_role.slug == 'service-ett-civil'
-assert old_role.details == "Rôle du service état civil"
+assert old_role.details == 'Rôle du service état civil'
assert old_role.emails == ['etat-civil@example.com']
assert old_role.emails_to_members is True
assert old_role.allows_backoffice_access is False
@@ -136,7 +136,7 @@ def test_process_notification_role(pub):
assert new_role.name == 'Service enfance'
assert new_role.slug == 'service-enfance'
assert new_role.uuid == uuid1
-assert new_role.details == "Rôle du service petite enfance"
+assert new_role.details == 'Rôle du service petite enfance'
assert new_role.emails == ['petite-enfance@example.com']
assert new_role.emails_to_members is False
assert new_role.allows_backoffice_access is False

View File

@@ -18,10 +18,10 @@ from wcs.sql import cleanup_connection
from .utilities import clean_temporary_pub, create_temporary_pub
CONFIG = {
-"postgresql": {
+'postgresql': {
-"createdb-connection-params": {"database": "postgres", "user": os.environ['USER']},
+'createdb-connection-params': {'database': 'postgres', 'user': os.environ['USER']},
-"database-template-name": "%s",
+'database-template-name': '%s',
-"user": os.environ['USER'],
+'user': os.environ['USER'],
}
}

View File

@@ -94,7 +94,7 @@ def test_snapshot_basics(pub):
assert snapshot2.instance # possible to restore
# no diff with latest snap but label is given
-pub.snapshot_class.snap(instance=formdef, label="foo bar")
+pub.snapshot_class.snap(instance=formdef, label='foo bar')
assert pub.snapshot_class.count() == 3
snapshot3 = pub.snapshot_class.get_latest('formdef', formdef.id)
assert snapshot3.serialization is None
@@ -114,7 +114,7 @@ def test_snapshot_basics(pub):
assert snapshot4.instance # possible to restore
# no diff with latest snap but label is given
-pub.snapshot_class.snap(instance=formdef, label="foo bar")
+pub.snapshot_class.snap(instance=formdef, label='foo bar')
assert pub.snapshot_class.count() == 5
snapshot5 = pub.snapshot_class.get_latest('formdef', formdef.id)
assert snapshot5.serialization is None

View File

@@ -6,13 +6,13 @@ from wcs.qommon.misc import strftime
def test():
# Make sure that the day names are in order
# from 1/1/1800 until 1/1/2100
-s = strftime("%Y has the same days as 1980 and 2008", datetime.date(1800, 9, 23))
+s = strftime('%Y has the same days as 1980 and 2008', datetime.date(1800, 9, 23))
-if s != "1800 has the same days as 1980 and 2008":
+if s != '1800 has the same days as 1980 and 2008':
raise AssertionError(s)
days = []
for i in range(1, 10):
-days.append(datetime.date(2000, 1, i).strftime("%A"))
+days.append(datetime.date(2000, 1, i).strftime('%A'))
nextday = {}
for i in range(8):
nextday[days[i]] = days[i + 1]

View File

@@ -222,7 +222,7 @@ def test_unaccent_templatetag(pub):
assert tmpl.render({'foo': 42}) == ''
assert pub.loggederror_class.count() == 1
logged_error = pub.loggederror_class.select(order_by='id')[0]
-assert logged_error.summary == "Failed to apply unaccent filter on value (42)"
+assert logged_error.summary == 'Failed to apply unaccent filter on value (42)'
assert tmpl.render({'foo': ['a', 'z']}) == ''
assert pub.loggederror_class.count() == 2
logged_error = pub.loggederror_class.select(order_by='id')[1]
@@ -485,8 +485,8 @@ def test_mathematics_templatetag():
# using strings
assert tmpl.render({'term1': '1.1', 'term2': 0}) == '1.1'
assert tmpl.render({'term1': 'not a number', 'term2': 1.2}) == ''
-assert tmpl.render({'term1': 0.3, 'term2': "1"}) == '1.3'
+assert tmpl.render({'term1': 0.3, 'term2': '1'}) == '1.3'
-assert tmpl.render({'term1': 1.4, 'term2': "not a number"}) == ''
+assert tmpl.render({'term1': 1.4, 'term2': 'not a number'}) == ''
# add
assert tmpl.render({'term1': 4, 'term2': -0.9}) == '3.1'
@@ -717,7 +717,7 @@ def test_get_filter():
assert tmpl.render({'foo': ['bar', 'baz']}) == 'bar'
tmpl = Template('{{ foo|get:0|default_if_none:"" }}')
-assert tmpl.render({'foo': ""}) == ''
+assert tmpl.render({'foo': ''}) == ''
assert tmpl.render({'foo': None}) == ''
assert tmpl.render({'foo': 23}) == ''

View File

@@ -109,7 +109,7 @@ def test_form_file_field_upload_storage(wscall, pub):
wscall.return_value = (
None,
200,
-json.dumps({"err": 0, "data": {"redirect_url": "https://crypto.example.net/"}}),
+json.dumps({'err': 0, 'data': {'redirect_url': 'https://crypto.example.net/'}}),
)
with open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb') as fd:
@@ -143,11 +143,11 @@ def test_form_file_field_upload_storage(wscall, pub):
200,
json.dumps(
{
-"err": 0,
+'err': 0,
-"data": {
+'data': {
-"redirect_url": "https://crypto.example.net/",
+'redirect_url': 'https://crypto.example.net/',
-"backoffice_redirect_url": None,
+'backoffice_redirect_url': None,
-"frontoffice_redirect_url": None,
+'frontoffice_redirect_url': None,
},
}
),
@@ -180,7 +180,7 @@ def test_form_file_field_upload_storage(wscall, pub):
wscall.return_value = (
None,
200,
-json.dumps({"err": 0, "data": {"redirect_url": "https://crypto.example.net/"}}),
+json.dumps({'err': 0, 'data': {'redirect_url': 'https://crypto.example.net/'}}),
)
resp = get_app(pub).get('/test/')
resp.forms[0]['f0$file'] = upload_0
@@ -206,7 +206,7 @@ def test_form_file_field_upload_storage(wscall, pub):
wscall.return_value = (
None,
200,
-json.dumps({"err": 0, "data": {"redirect_url": "https://crypto.example.net/"}}),
+json.dumps({'err': 0, 'data': {'redirect_url': 'https://crypto.example.net/'}}),
)
resp = get_app(pub).get('/test/')
resp.forms[0]['f0$file'] = upload_0
@@ -295,7 +295,7 @@ def test_remoteopaque_in_attachmentevolutionpart(wscall, pub):
wscall.return_value = (
None,
200,
-json.dumps({"err": 0, "data": {"redirect_url": "https://crypto.example.net/"}}),
+json.dumps({'err': 0, 'data': {'redirect_url': 'https://crypto.example.net/'}}),
)
with open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb') as fd:
@ -349,22 +349,22 @@ class HttpRequestsMocking:
self.requests_mock.get('http://remote.example.net/404', status=404, body='page not found') self.requests_mock.get('http://remote.example.net/404', status=404, body='page not found')
self.requests_mock.get('http://remote.example.net/404-json', status=404, json={'err': 'not-found'}) self.requests_mock.get('http://remote.example.net/404-json', status=404, json={'err': 'not-found'})
self.requests_mock.get('http://remote.example.net/500', status=500, body='internal server error') self.requests_mock.get('http://remote.example.net/500', status=500, body='internal server error')
self.requests_mock.get('http://remote.example.net/json', json={"foo": "bar"}) self.requests_mock.get('http://remote.example.net/json', json={'foo': 'bar'})
self.requests_mock.post('http://remote.example.net/json', json={"foo": "bar"}) self.requests_mock.post('http://remote.example.net/json', json={'foo': 'bar'})
self.requests_mock.delete('http://remote.example.net/json', json={"foo": "bar"}) self.requests_mock.delete('http://remote.example.net/json', json={'foo': 'bar'})
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-list', json={"data": [{"id": "a", "text": "b"}]} 'http://remote.example.net/json-list', json={'data': [{'id': 'a', 'text': 'b'}]}
) )
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-list-extra', 'http://remote.example.net/json-list-extra',
json={"data": [{"id": "a", "text": "b", "foo": "bar"}]}, json={'data': [{'id': 'a', 'text': 'b', 'foo': 'bar'}]},
) )
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-list-extra-with-disabled', 'http://remote.example.net/json-list-extra-with-disabled',
json={ json={
"data": [ 'data': [
{"id": "a", "text": "b", "foo": "bar"}, {'id': 'a', 'text': 'b', 'foo': 'bar'},
{"id": "c", "text": "d", "foo": "baz", "disabled": True}, {'id': 'c', 'text': 'd', 'foo': 'baz', 'disabled': True},
] ]
}, },
) )
@ -377,16 +377,16 @@ class HttpRequestsMocking:
content_type='text/xml', content_type='text/xml',
headers={'x-error-code': '1'}, headers={'x-error-code': '1'},
) )
self.requests_mock.get('http://remote.example.net/json-err0', json={"data": "foo", "err": 0}) self.requests_mock.get('http://remote.example.net/json-err0', json={'data': 'foo', 'err': 0})
self.requests_mock.get('http://remote.example.net/json-err0int', json={"data": "foo", "err": "0"}) self.requests_mock.get('http://remote.example.net/json-err0int', json={'data': 'foo', 'err': '0'})
self.requests_mock.get('http://remote.example.net/json-err1', json={"data": "", "err": 1}) self.requests_mock.get('http://remote.example.net/json-err1', json={'data': '', 'err': 1})
self.requests_mock.get('http://remote.example.net/json-err1int', json={"data": "", "err": "1"}) self.requests_mock.get('http://remote.example.net/json-err1int', json={'data': '', 'err': '1'})
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-err1-with-desc', json={'data': '', 'err': 1, 'err_desc': ':('} 'http://remote.example.net/json-err1-with-desc', json={'data': '', 'err': 1, 'err_desc': ':('}
) )
self.requests_mock.get('http://remote.example.net/json-errstr', json={"data": "", "err": "bug"}) self.requests_mock.get('http://remote.example.net/json-errstr', json={'data': '', 'err': 'bug'})
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-list-err1', json={"data": [{"id": "a", "text": "b"}], "err": 1} 'http://remote.example.net/json-list-err1', json={'data': [{'id': 'a', 'text': 'b'}], 'err': 1}
) )
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-list-err1bis', 'http://remote.example.net/json-list-err1bis',
@ -407,14 +407,14 @@ class HttpRequestsMocking:
) )
self.requests_mock.get('http://remote.example.net/json-errstr', json={'data': '', 'err': 'bug'}) self.requests_mock.get('http://remote.example.net/json-errstr', json={'data': '', 'err': 'bug'})
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-errheader0', json={"foo": "bar"}, headers={'x-error-code': '0'} 'http://remote.example.net/json-errheader0', json={'foo': 'bar'}, headers={'x-error-code': '0'}
) )
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-errheader1', json={"foo": "bar"}, headers={'x-error-code': '1'} 'http://remote.example.net/json-errheader1', json={'foo': 'bar'}, headers={'x-error-code': '1'}
) )
self.requests_mock.get( self.requests_mock.get(
'http://remote.example.net/json-errheaderstr', 'http://remote.example.net/json-errheaderstr',
json={"foo": "bar"}, json={'foo': 'bar'},
headers={'x-error-code': 'bug'}, headers={'x-error-code': 'bug'},
) )
self.requests_mock.get( self.requests_mock.get(
@ -2948,7 +2948,7 @@ def test_sms_with_passerelle(pub):
with mock.patch('wcs.wscalls.get_secret_and_orig') as mocked_secret_and_orig: with mock.patch('wcs.wscalls.get_secret_and_orig') as mocked_secret_and_orig:
mocked_secret_and_orig.return_value = ('secret', 'localhost') mocked_secret_and_orig.return_value = ('secret', 'localhost')
with responses.RequestsMock() as rsps: with responses.RequestsMock() as rsps:
rsps.post('http://passerelle.example.com/send', status=400, json={"err": 1}) rsps.post('http://passerelle.example.com/send', status=400, json={'err': 1})
item.perform(formdata) item.perform(formdata)
assert pub.loggederror_class.count() == 1 assert pub.loggederror_class.count() == 1
assert pub.loggederror_class.select()[0].summary == 'Could not send SMS' assert pub.loggederror_class.select()[0].summary == 'Could not send SMS'
@ -913,7 +913,7 @@ def test_set_backoffice_field_items(pub):
item = SetBackofficeFieldsWorkflowStatusItem() item = SetBackofficeFieldsWorkflowStatusItem()
item.parent = st1 item.parent = st1
item.fields = [{'field_id': 'bo1', 'value': "=form_var_items_raw"}] item.fields = [{'field_id': 'bo1', 'value': '=form_var_items_raw'}]
item.perform(formdata) item.perform(formdata)
assert formdata.data['bo1'] == ['a', 'c'] assert formdata.data['bo1'] == ['a', 'c']
@ -932,7 +932,7 @@ def test_set_backoffice_field_items(pub):
pub.substitutions.reset() pub.substitutions.reset()
pub.substitutions.feed(formdata) pub.substitutions.feed(formdata)
item.fields = [{'field_id': 'bo1', 'value': "{{form_var_items_raw}}"}] item.fields = [{'field_id': 'bo1', 'value': '{{form_var_items_raw}}'}]
item.perform(formdata) item.perform(formdata)
# using a single int # using a single int
@ -959,7 +959,7 @@ def test_set_backoffice_field_items(pub):
item = SetBackofficeFieldsWorkflowStatusItem() item = SetBackofficeFieldsWorkflowStatusItem()
item.parent = st1 item.parent = st1
item.fields = [{'field_id': 'bo1', 'value': "=2"}] item.fields = [{'field_id': 'bo1', 'value': '=2'}]
item.perform(formdata) item.perform(formdata)
formdata = formdef.data_class().get(formdata.id) formdata = formdef.data_class().get(formdata.id)
@ -973,7 +973,7 @@ def test_set_backoffice_field_items(pub):
pub.loggederror_class.wipe() pub.loggederror_class.wipe()
item = SetBackofficeFieldsWorkflowStatusItem() item = SetBackofficeFieldsWorkflowStatusItem()
item.parent = st1 item.parent = st1
item.fields = [{'field_id': 'bo1', 'value': "=Ellipsis"}] item.fields = [{'field_id': 'bo1', 'value': '=Ellipsis'}]
item.perform(formdata) item.perform(formdata)
assert pub.loggederror_class.count() == 1 assert pub.loggederror_class.count() == 1
logged_error = pub.loggederror_class.select()[0] logged_error = pub.loggederror_class.select()[0]
@ -982,7 +982,7 @@ def test_set_backoffice_field_items(pub):
# using a string with multiple values # using a string with multiple values
item = SetBackofficeFieldsWorkflowStatusItem() item = SetBackofficeFieldsWorkflowStatusItem()
item.parent = st1 item.parent = st1
item.fields = [{'field_id': 'bo1', 'value': "1|3"}] item.fields = [{'field_id': 'bo1', 'value': '1|3'}]
item.perform(formdata) item.perform(formdata)
formdata = formdef.data_class().get(formdata.id) formdata = formdef.data_class().get(formdata.id)
@ -1017,7 +1017,7 @@ def test_set_backoffice_field_date(pub):
item = SetBackofficeFieldsWorkflowStatusItem() item = SetBackofficeFieldsWorkflowStatusItem()
item.parent = st1 item.parent = st1
item.fields = [{'field_id': 'bo1', 'value': "=utils.today()"}] item.fields = [{'field_id': 'bo1', 'value': '=utils.today()'}]
item.perform(formdata) item.perform(formdata)
formdata = formdef.data_class().get(formdata.id) formdata = formdef.data_class().get(formdata.id)
@ -1035,7 +1035,7 @@ def test_set_backoffice_field_date(pub):
item = SetBackofficeFieldsWorkflowStatusItem() item = SetBackofficeFieldsWorkflowStatusItem()
item.parent = st1 item.parent = st1
item.fields = [{'field_id': 'bo1', 'value': "23/3/2017"}] item.fields = [{'field_id': 'bo1', 'value': '23/3/2017'}]
item.perform(formdata) item.perform(formdata)
formdata = formdef.data_class().get(formdata.id) formdata = formdef.data_class().get(formdata.id)
@ -1478,7 +1478,7 @@ def test_set_backoffice_field_immediate_use(http_requests, pub):
item.fields = [ item.fields = [
{'field_id': 'bo1', 'value': 'X{{form_var_string}}X'}, {'field_id': 'bo1', 'value': 'X{{form_var_string}}X'},
{'field_id': 'bo2', 'value': "Y{{form_var_backoffice_blah}}Y"}, {'field_id': 'bo2', 'value': 'Y{{form_var_backoffice_blah}}Y'},
] ]
pub.substitutions.reset() pub.substitutions.reset()
pub.substitutions.feed(formdata) pub.substitutions.feed(formdata)
@ -440,7 +440,7 @@ def test_email_attachments(pub, emails):
assert emails.emails['foobar']['msg'].get_payload(2).get_content_type() == 'application/json' assert emails.emails['foobar']['msg'].get_payload(2).get_content_type() == 'application/json'
payload1 = emails.emails['foobar']['msg'].get_payload(1) payload1 = emails.emails['foobar']['msg'].get_payload(1)
payload2 = emails.emails['foobar']['msg'].get_payload(2) payload2 = emails.emails['foobar']['msg'].get_payload(2)
assert payload1.get_payload(decode=True) == b"Hello world" assert payload1.get_payload(decode=True) == b'Hello world'
assert json.loads(force_str(payload2.get_payload(decode=True))) == {'hello': 'world'} assert json.loads(force_str(payload2.get_payload(decode=True))) == {'hello': 'world'}
# check with templates # check with templates
@ -63,7 +63,7 @@ def test_wscall_record_errors(pub):
# error with bytes that can be stored as string # error with bytes that can be stored as string
with responses.RequestsMock() as rsps: with responses.RequestsMock() as rsps:
rsps.get('http://test', status=404, body=b"test bytes") rsps.get('http://test', status=404, body=b'test bytes')
wscall.perform(formdata) wscall.perform(formdata)
assert formdata.evolution[-1].parts[-1].get_json_export_dict() == { assert formdata.evolution[-1].parts[-1].get_json_export_dict() == {
'type': 'wscall-error', 'type': 'wscall-error',
@ -14,7 +14,7 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>. # along with this program; if not, see <http://www.gnu.org/licenses/>.
APP_DIR = "/var/lib/wcs" APP_DIR = '/var/lib/wcs'
DATA_DIR = "/usr/share/wcs" DATA_DIR = '/usr/share/wcs'
ERROR_LOG = None ERROR_LOG = None
REDIRECT_ON_UNKNOWN_VHOST = None REDIRECT_ON_UNKNOWN_VHOST = None
@ -188,7 +188,7 @@ class FieldDefPage(Directory):
if to_be_deleted: if to_be_deleted:
form.add(CheckboxWidget, 'delete_fields', title=_('Also remove all fields from the page')) form.add(CheckboxWidget, 'delete_fields', title=_('Also remove all fields from the page'))
form.add_submit('delete', _('Delete')) form.add_submit('delete', _('Delete'))
form.add_submit("cancel", _("Cancel")) form.add_submit('cancel', _('Cancel'))
if form.get_widget('cancel').parse(): if form.get_widget('cancel').parse():
return self.redirect_field_anchor(self.field) return self.redirect_field_anchor(self.field)
if not form.is_submitted() or form.has_errors(): if not form.is_submitted() or form.has_errors():
@ -251,7 +251,7 @@ class FieldDefPage(Directory):
if page_fields: if page_fields:
form.add(CheckboxWidget, 'duplicate_fields', title=_('Also duplicate all fields of the page')) form.add(CheckboxWidget, 'duplicate_fields', title=_('Also duplicate all fields of the page'))
form.add_submit('submit', _('Duplicate')) form.add_submit('submit', _('Duplicate'))
form.add_submit("cancel", _("Cancel")) form.add_submit('cancel', _('Cancel'))
if form.get_widget('cancel').parse(): if form.get_widget('cancel').parse():
return self.redirect_field_anchor(self.field) return self.redirect_field_anchor(self.field)
if not form.is_submitted() or form.has_errors(): if not form.is_submitted() or form.has_errors():
@ -31,11 +31,11 @@ class RoleUI:
self.role = get_publisher().role_class() self.role = get_publisher().role_class()
def get_form(self): def get_form(self):
form = Form(enctype="multipart/form-data") form = Form(enctype='multipart/form-data')
form.add(StringWidget, "name", title=_('Role Name'), required=True, size=30, value=self.role.name) form.add(StringWidget, 'name', title=_('Role Name'), required=True, size=30, value=self.role.name)
form.add( form.add(
TextWidget, TextWidget,
"details", 'details',
title=_('Role Details'), title=_('Role Details'),
required=False, required=False,
cols=40, cols=40,
@ -63,8 +63,8 @@ class RoleUI:
title=_('Users holding the role can access to backoffice'), title=_('Users holding the role can access to backoffice'),
value=self.role.allows_backoffice_access, value=self.role.allows_backoffice_access,
) )
form.add_submit("submit", _("Submit")) form.add_submit('submit', _('Submit'))
form.add_submit("cancel", _("Cancel")) form.add_submit('cancel', _('Cancel'))
return form return form
def submit_form(self, form): def submit_form(self, form):
@ -86,7 +86,7 @@ class RoleUI:
class RolePage(Directory): class RolePage(Directory):
_q_exports = ['', "edit", "delete"] _q_exports = ['', 'edit', 'delete']
def __init__(self, component): def __init__(self, component):
try: try:
@ -192,9 +192,9 @@ class RolePage(Directory):
def delete(self): def delete(self):
form = Form(enctype='multipart/form-data') form = Form(enctype='multipart/form-data')
form.widgets.append(HtmlWidget('<p>%s</p>' % _("You are about to irrevocably delete this role."))) form.widgets.append(HtmlWidget('<p>%s</p>' % _('You are about to irrevocably delete this role.')))
form.add_submit('delete', _('Delete')) form.add_submit('delete', _('Delete'))
form.add_submit("cancel", _("Cancel")) form.add_submit('cancel', _('Cancel'))
if form.get_widget('cancel').parse(): if form.get_widget('cancel').parse():
return redirect('.') return redirect('.')
if not form.is_submitted() or form.has_errors(): if not form.is_submitted() or form.has_errors():
@ -210,7 +210,7 @@ class RolePage(Directory):
class RolesDirectory(Directory): class RolesDirectory(Directory):
_q_exports = ["", "new"] _q_exports = ['', 'new']
def _q_traverse(self, path): def _q_traverse(self, path):
get_response().breadcrumb.append(('roles/', _('Roles'))) get_response().breadcrumb.append(('roles/', _('Roles')))
@ -749,7 +749,7 @@ class SettingsDirectory(AccessControlled, Directory):
if get_request().form.get('download'): if get_request().form.get('download'):
return self.export_download() return self.export_download()
form = Form(enctype="multipart/form-data") form = Form(enctype='multipart/form-data')
form.add(CheckboxWidget, 'formdefs', title=_('Forms'), value=True) form.add(CheckboxWidget, 'formdefs', title=_('Forms'), value=True)
form.add(CheckboxWidget, 'carddefs', title=_('Card Models'), value=True) form.add(CheckboxWidget, 'carddefs', title=_('Card Models'), value=True)
form.add(CheckboxWidget, 'workflows', title=_('Workflows'), value=True) form.add(CheckboxWidget, 'workflows', title=_('Workflows'), value=True)
@ -1265,7 +1265,7 @@ $('#form_default-zoom-level').on('change', function() {
return redirect('.') return redirect('.')
def debug_options(self): def debug_options(self):
form = Form(enctype="multipart/form-data") form = Form(enctype='multipart/form-data')
debug_cfg = get_cfg('debug', {}) debug_cfg = get_cfg('debug', {})
form.add( form.add(
StringWidget, StringWidget,
@ -220,9 +220,9 @@ class UserPage(Directory):
r += str(_('Account federated with %s') % label) r += str(_('Account federated with %s') % label)
r += htmltext('<br />') r += htmltext('<br />')
if federation.localNameIdentifier: if federation.localNameIdentifier:
r += str(_("local: ") + federation.localNameIdentifier.content) r += str(_('local: ') + federation.localNameIdentifier.content)
if federation.remoteNameIdentifier: if federation.remoteNameIdentifier:
r += str(_("remote: ") + federation.remoteNameIdentifier.content) r += str(_('remote: ') + federation.remoteNameIdentifier.content)
r += htmltext('</li>') r += htmltext('</li>')
r += htmltext('</ul></div>') r += htmltext('</ul></div>')
@ -278,9 +278,9 @@ class UserPage(Directory):
def delete(self): def delete(self):
form = Form(enctype='multipart/form-data') form = Form(enctype='multipart/form-data')
form.widgets.append(HtmlWidget('<p>%s</p>' % _("You are about to irrevocably delete this user."))) form.widgets.append(HtmlWidget('<p>%s</p>' % _('You are about to irrevocably delete this user.')))
form.add_submit('delete', _('Delete')) form.add_submit('delete', _('Delete'))
form.add_submit("cancel", _("Cancel")) form.add_submit('cancel', _('Cancel'))
if form.get_widget('cancel').parse(): if form.get_widget('cancel').parse():
return redirect('.') return redirect('.')
if not form.is_submitted() or form.has_errors(): if not form.is_submitted() or form.has_errors():
@ -791,7 +791,7 @@ class WorkflowStatusPage(Directory):
return redirect('.') return redirect('.')
def delete(self): def delete(self):
form = Form(enctype="multipart/form-data") form = Form(enctype='multipart/form-data')
if self.workflow.possible_status and len(self.workflow.possible_status) == 1: if self.workflow.possible_status and len(self.workflow.possible_status) == 1:
form.widgets.append( form.widgets.append(
HtmlWidget( HtmlWidget(
@ -1773,22 +1773,22 @@ class WorkflowPage(Directory):
return r.getvalue() return r.getvalue()
def delete(self): def delete(self):
form = Form(enctype="multipart/form-data") form = Form(enctype='multipart/form-data')
from itertools import chain from itertools import chain
for objdef in chain(FormDef.select(), CardDef.select()): for objdef in chain(FormDef.select(), CardDef.select()):
if objdef.workflow_id == self.workflow.id: if objdef.workflow_id == self.workflow.id:
form.widgets.append( form.widgets.append(
HtmlWidget('<p>%s</p>' % _("This workflow is currently in use, you cannot remove it.")) HtmlWidget('<p>%s</p>' % _('This workflow is currently in use, you cannot remove it.'))
) )
form.add_submit("cancel", _("Cancel")) form.add_submit('cancel', _('Cancel'))
break break
else: else:
form.widgets.append( form.widgets.append(
HtmlWidget('<p>%s</p>' % _("You are about to irrevocably delete this workflow.")) HtmlWidget('<p>%s</p>' % _('You are about to irrevocably delete this workflow.'))
) )
form.add_submit('delete', _('Delete')) form.add_submit('delete', _('Delete'))
form.add_submit("cancel", _("Cancel")) form.add_submit('cancel', _('Cancel'))
if form.get_widget('cancel').parse(): if form.get_widget('cancel').parse():
return redirect('.') return redirect('.')
if not form.is_submitted() or form.has_errors(): if not form.is_submitted() or form.has_errors():
@ -125,4 +125,4 @@ class CardData(FormData):
'file_digest': file_digest, 'file_digest': file_digest,
} }
token = get_session().create_token('card-file-by-token', context) token = get_session().create_token('card-file-by-token', context)
return "/api/card-file-by-token/%s" % token.id return '/api/card-file-by-token/%s' % token.id
@ -99,7 +99,7 @@ class CmdDeleteTenant(Command):
else: else:
schema_name = 'removed_%s_%s' % (deletion_date, dbname) schema_name = 'removed_%s_%s' % (deletion_date, dbname)
cur.execute("CREATE SCHEMA %s" % schema_name[:63]) cur.execute('CREATE SCHEMA %s' % schema_name[:63])
for table_name in tables_names: for table_name in tables_names:
cur.execute('ALTER TABLE %s SET SCHEMA %s' % (table_name, schema_name[:63])) cur.execute('ALTER TABLE %s SET SCHEMA %s' % (table_name, schema_name[:63]))
@ -301,7 +301,7 @@ def get_json_from_url(
entries = misc.json_loads(misc.urlopen(url).read()) entries = misc.json_loads(misc.urlopen(url).read())
if not isinstance(entries, dict): if not isinstance(entries, dict):
raise ValueError('not a json dict') raise ValueError('not a json dict')
if entries.get('err') not in (None, 0, "0"): if entries.get('err') not in (None, 0, '0'):
details = [] details = []
for key in ['err_desc', 'err_class']: for key in ['err_desc', 'err_class']:
if entries.get(key): if entries.get(key):
@ -1,2 +1,2 @@
DATETIME_FORMAT = 'Y-m-d H:i' DATETIME_FORMAT = 'Y-m-d H:i'
DATE_FORMAT = "Y-m-d" DATE_FORMAT = 'Y-m-d'
@ -1,2 +1,2 @@
DATETIME_FORMAT = 'd/m/Y H:i' DATETIME_FORMAT = 'd/m/Y H:i'
DATE_FORMAT = "d/m/Y" DATE_FORMAT = 'd/m/Y'
@ -350,16 +350,16 @@ class FormDef(StorableObject):
if order_by not in [field.contextual_varname, 'f%s' % field.contextual_id]: if order_by not in [field.contextual_varname, 'f%s' % field.contextual_id]:
continue continue
if field.contextual_varname == order_by: if field.contextual_varname == order_by:
order_by = "f%s" % field.contextual_id order_by = 'f%s' % field.contextual_id
if getattr(field, 'block_field', None) and 'f%s' % field.contextual_id == order_by: if getattr(field, 'block_field', None) and 'f%s' % field.contextual_id == order_by:
# field of block field, sort on the first element # field of block field, sort on the first element
order_by = "f%s->'data'->0->>'%s%s'" % ( order_by = "f%s->'data'->0->>'%s%s'" % (
field.block_field.id, field.block_field.id,
field.id, field.id,
"_display" if field.store_display_value else "", '_display' if field.store_display_value else '',
) )
elif field.store_display_value: elif field.store_display_value:
order_by += "_display" order_by += '_display'
break break
return '%s%s' % (direction, order_by) return '%s%s' % (direction, order_by)
@ -450,7 +450,7 @@ class FormStatusPage(Directory, FormTemplateMixin):
response.set_status(303) response.set_status(303)
response.headers['location'] = url response.headers['location'] = url
response.content_type = 'text/plain' response.content_type = 'text/plain'
return "Your browser should redirect you" return 'Your browser should redirect you'
def export_to_json( def export_to_json(
self, self,
@ -137,14 +137,14 @@ class WcsPublisher(QommonPublisher):
@classmethod @classmethod
def configure(cls, config): def configure(cls, config):
if config.has_option("main", "app_dir"): if config.has_option('main', 'app_dir'):
cls.APP_DIR = config.get("main", "app_dir") cls.APP_DIR = config.get('main', 'app_dir')
if config.has_option("main", "data_dir"): if config.has_option('main', 'data_dir'):
cls.DATA_DIR = config.get("main", "data_dir") cls.DATA_DIR = config.get('main', 'data_dir')
if config.has_option("main", "error_log"): if config.has_option('main', 'error_log'):
cls.ERROR_LOG = config.get("main", "error_log") cls.ERROR_LOG = config.get('main', 'error_log')
if config.has_option("main", "missing_appdir_redirect"): if config.has_option('main', 'missing_appdir_redirect'):
cls.missing_appdir_redirect = config.get("main", "missing_appdir_redirect") cls.missing_appdir_redirect = config.get('main', 'missing_appdir_redirect')
@classmethod @classmethod
def register_cronjobs(cls): def register_cronjobs(cls):
@ -49,7 +49,7 @@ def _find_vc_version():
) as process: ) as process:
version = process.communicate()[0].splitlines()[-1].split()[2] version = process.communicate()[0].splitlines()[-1].split()[2]
if process.returncode == 0: if process.returncode == 0:
return "%s %s (Debian)" % (package, version.decode()) return '%s %s (Debian)' % (package, version.decode())
except Exception: except Exception:
pass pass
return None return None
@ -74,7 +74,7 @@ def _find_vc_version():
output = process.communicate()[0] output = process.communicate()[0]
starred_line = [x for x in output.splitlines() if x.startswith(b'*')][0] starred_line = [x for x in output.splitlines() if x.startswith(b'*')][0]
branch = str(starred_line.split()[1].decode('ascii')) branch = str(starred_line.split()[1].decode('ascii'))
url = "https://repos.entrouvert.org/%s.git/commit/?id=%s" % (package, rev) url = 'https://repos.entrouvert.org/%s.git/commit/?id=%s' % (package, rev)
if version: if version:
revision = htmltext('%s %s <a href="%s">git %s\'s branch rev:%s</a>') % ( revision = htmltext('%s %s <a href="%s">git %s\'s branch rev:%s</a>') % (
package, package,
@ -94,9 +94,9 @@ def _find_vc_version():
pass pass
else: else:
if version: if version:
revision = "%s %s (Tarball)" % (package, version) revision = '%s %s (Tarball)' % (package, version)
else: else:
revision = "%s (Tarball)" % (package) revision = '%s (Tarball)' % (package)
if not revision: if not revision:
return None return None
@ -68,7 +68,7 @@ class _Timer(TenantAwareThread):
class _MainThread(TenantAwareThread): class _MainThread(TenantAwareThread):
def __init__(self): def __init__(self):
super().__init__(name="MainThread") super().__init__(name='MainThread')
self._Thread__started.set() self._Thread__started.set()
self._set_ident() self._set_ident()
with threading._active_limbo_lock: with threading._active_limbo_lock:
@ -82,18 +82,18 @@ class _MainThread(TenantAwareThread):
t = threading._pickSomeNonDaemonThread() t = threading._pickSomeNonDaemonThread()
if t: if t:
if __debug__: if __debug__:
self._note("%s: waiting for other threads", self) self._note('%s: waiting for other threads', self)
while t: while t:
t.join() t.join()
t = threading._pickSomeNonDaemonThread() t = threading._pickSomeNonDaemonThread()
if __debug__: if __debug__:
self._note("%s: exiting", self) self._note('%s: exiting', self)
self._Thread__delete() self._Thread__delete()
class _DummyThread(TenantAwareThread): class _DummyThread(TenantAwareThread):
def __init__(self): def __init__(self):
super().__init__(name=threading._newname("Dummy-%d"), daemon=True) super().__init__(name=threading._newname('Dummy-%d'), daemon=True)
self._started.set() self._started.set()
self._set_ident() self._set_ident()
@ -108,7 +108,7 @@ class _DummyThread(TenantAwareThread):
return True return True
def join(self, timeout=None): def join(self, timeout=None):
assert False, "cannot join a dummy thread" assert False, 'cannot join a dummy thread'
class AppConfig(django.apps.AppConfig): class AppConfig(django.apps.AppConfig):
@ -59,9 +59,9 @@ class Command:
self.config.add_section('main') self.config.add_section('main')
sub_options, args = self.parse_args(args) sub_options, args = self.parse_args(args)
if sub_options.app_dir: if sub_options.app_dir:
self.config.set("main", "app_dir", sub_options.app_dir) self.config.set('main', 'app_dir', sub_options.app_dir)
if sub_options.data_dir: if sub_options.data_dir:
self.config.set("main", "data_dir", sub_options.data_dir) self.config.set('main', 'data_dir', sub_options.data_dir)
return self.execute(base_options, sub_options, args) return self.execute(base_options, sub_options, args)
def parse_args(self, args): def parse_args(self, args):
@ -97,7 +97,7 @@ class Ctl:
help=_('use a non default configuration file'), help=_('use a non default configuration file'),
) )
self.parser.add_option( self.parser.add_option(
'--help', action='callback', callback=self.print_help, help=_("Display this help and exit") '--help', action='callback', callback=self.print_help, help=_('Display this help and exit')
) )
def load_all_commands(self, ignore_errors=True): def load_all_commands(self, ignore_errors=True):
@ -142,8 +142,8 @@ class InspectException(Exception):
TraversalError.title = _('Page not found') TraversalError.title = _('Page not found')
TraversalError.description = _( TraversalError.description = _(
"The requested link does not exist on this site. If " 'The requested link does not exist on this site. If '
"you arrived here by following a link from an external " 'you arrived here by following a link from an external '
"page, please inform that page's maintainer." "page, please inform that page's maintainer."
) )
@ -288,7 +288,7 @@ class Template:
self.parse_file(fname, base_format) self.parse_file(fname, base_format)
def parse_file(self, fname, base_format=FORMAT_RAW): def parse_file(self, fname, base_format=FORMAT_RAW):
"fname -> a string object with pathname of file containg an EZT template." 'fname -> a string object with pathname of file containg an EZT template.'
self.parse(_FileReader(fname), base_format) self.parse(_FileReader(fname), base_format)
@ -509,7 +509,7 @@ class Template:
self._execute(self._parse(reader.read_other(fname)), fp, ctx) self._execute(self._parse(reader.read_other(fname)), fp, ctx)
def _cmd_if_any(self, args, fp, ctx): def _cmd_if_any(self, args, fp, ctx):
"If any value is a non-empty string or non-empty list, then T else F." 'If any value is a non-empty string or non-empty list, then T else F.'
(valrefs, t_section, f_section) = args (valrefs, t_section, f_section) = args
value = 0 value = 0
for valref in valrefs: for valref in valrefs:
@ -580,7 +580,7 @@ class Template:
def boolean(value): def boolean(value):
"Return a value suitable for [if-any bool_var] usage in a template." 'Return a value suitable for [if-any bool_var] usage in a template.'
if value: if value:
return 'yes' return 'yes'
return None return None
@ -755,7 +755,7 @@ class _context:
class Reader: class Reader:
"Abstract class which allows EZT to detect Reader objects." 'Abstract class which allows EZT to detect Reader objects.'
class _FileReader(Reader): class _FileReader(Reader):
@ -242,7 +242,7 @@ def file_render_content(self):
attrs['aria-required'] = 'true' attrs['aria-required'] = 'true'
if self.attrs: if self.attrs:
attrs.update(self.attrs) attrs.update(self.attrs)
return htmltag("input", xml_end=True, type=self.HTML_TYPE, name=self.name, value=self.value, **attrs) return htmltag('input', xml_end=True, type=self.HTML_TYPE, name=self.name, value=self.value, **attrs)
FileWidget.render_content = file_render_content FileWidget.render_content = file_render_content
@ -339,12 +339,12 @@ Widget.transfer_form_value = transfer_form_value
class Form(QuixoteForm): class Form(QuixoteForm):
TOKEN_NOTICE = _( TOKEN_NOTICE = _(
"The form you have submitted is invalid. Most " 'The form you have submitted is invalid. Most '
"likely it has been successfully submitted once " 'likely it has been successfully submitted once '
"already. Please review the form data " 'already. Please review the form data '
"and submit the form again." 'and submit the form again.'
) )
ERROR_NOTICE = _("There were errors processing your form. See below for details.") ERROR_NOTICE = _('There were errors processing your form. See below for details.')
info = None info = None
captcha = None captcha = None
@ -673,7 +673,7 @@ class StringWidget(QuixoteStringWidget):
attrs.update(self.attrs) attrs.update(self.attrs)
if getattr(self, 'inputmode', None): if getattr(self, 'inputmode', None):
attrs['inputmode'] = self.inputmode attrs['inputmode'] = self.inputmode
return htmltag("input", xml_end=True, type=self.HTML_TYPE, name=self.name, value=self.value, **attrs) return htmltag('input', xml_end=True, type=self.HTML_TYPE, name=self.name, value=self.value, **attrs)
class DurationWidget(StringWidget): class DurationWidget(StringWidget):
@ -744,9 +744,9 @@ class TextWidget(QuixoteTextWidget):
if attrs.get('readonly') and not self.value: if attrs.get('readonly') and not self.value:
attrs['rows'] = 1 attrs['rows'] = 1
return ( return (
htmltag("textarea", name=self.name, **attrs) htmltag('textarea', name=self.name, **attrs)
+ htmlescape(self.value or "") + htmlescape(self.value or '')
+ htmltext("</textarea>") + htmltext('</textarea>')
) )
@ -780,12 +780,12 @@ class CheckboxWidget(QuixoteCheckboxWidget):
# hack to restore value on click # hack to restore value on click
attrs['onclick'] = 'this.checked = !this.checked;' attrs['onclick'] = 'this.checked = !this.checked;'
checkbox = htmltag( checkbox = htmltag(
"input", 'input',
xml_end=True, xml_end=True,
type="checkbox", type='checkbox',
name=self.name, name=self.name,
value="yes", value='yes',
checked=self.value and "checked" or None, checked=self.value and 'checked' or None,
**attrs, **attrs,
) )
if standalone: if standalone:
@ -1179,13 +1179,13 @@ class SingleSelectWidget(quixote.form.widget.SingleSelectWidget):
attrs['aria-required'] = 'true' attrs['aria-required'] = 'true'
if self.attrs: if self.attrs:
attrs.update(self.attrs) attrs.update(self.attrs)
tags = [htmltag("select", name=self.name, **attrs)] tags = [htmltag('select', name=self.name, **attrs)]
opened_optgroup = False opened_optgroup = False
for obj, description, key, attrs in self.full_options: for obj, description, key, attrs in self.full_options:
if isinstance(obj, OptGroup): if isinstance(obj, OptGroup):
if opened_optgroup: if opened_optgroup:
tags.append(htmltext("</optgroup>")) tags.append(htmltext('</optgroup>'))
tags.append(htmltag("optgroup", label=obj.title)) tags.append(htmltag('optgroup', label=obj.title))
opened_optgroup = True opened_optgroup = True
continue continue
if self.is_selected(obj): if self.is_selected(obj):
@ -1193,13 +1193,13 @@ class SingleSelectWidget(quixote.form.widget.SingleSelectWidget):
else: else:
selected = None selected = None
if description is None: if description is None:
description = "" description = ''
r = htmltag("option", value=key, selected=selected, **attrs) r = htmltag('option', value=key, selected=selected, **attrs)
tags.append(r + htmlescape(description) + htmltext('</option>')) tags.append(r + htmlescape(description) + htmltext('</option>'))
if opened_optgroup: if opened_optgroup:
tags.append(htmltext("</optgroup>")) tags.append(htmltext('</optgroup>'))
tags.append(htmltext("</select>")) tags.append(htmltext('</select>'))
return htmltext("\n").join(tags) return htmltext('\n').join(tags)
class ValidationCondition(Condition): class ValidationCondition(Condition):
@ -1973,7 +1973,7 @@ class WidgetList(quixote.form.widget.WidgetList):
value=None, value=None,
element_type=StringWidget, element_type=StringWidget,
element_kwargs=None, element_kwargs=None,
add_element_label="Add row", add_element_label='Add row',
default_items_count=None, default_items_count=None,
max_items=None, max_items=None,
**kwargs, **kwargs,
@ -2562,7 +2562,7 @@ class WysiwygTextWidget(TextWidget):
return ( return (
htmltag('textarea', name=self.name, **attrs) htmltag('textarea', name=self.name, **attrs)
+ htmlescape(self.value or '') + htmlescape(self.value or '')
+ htmltext("</textarea>") + htmltext('</textarea>')
) )
@ -2864,7 +2864,7 @@ class TableListRowsWidget(WidgetListAsTable):
return klass return klass
def add_element(self, value=None): def add_element(self, value=None):
name = "element%d" % len(self.element_names) name = 'element%d' % len(self.element_names)
self.add(self.table_row_class, name, value=value, **self.widget_kwargs) self.add(self.table_row_class, name, value=value, **self.widget_kwargs)
self.element_names.append(name) self.element_names.append(name)
@ -121,20 +121,20 @@ class HTTPRequest(quixote.http_request.HTTPRequest):
row = '%-15s %r' row = '%-15s %r'
if self.form: if self.form:
result.append("Form:") result.append('Form:')
for k, v in sorted(self.form.items()): for k, v in sorted(self.form.items()):
result.append(row % (k, v)) result.append(row % (k, v))
result.append("") result.append('')
result.append("Cookies:") result.append('Cookies:')
for k, v in sorted(self.cookies.items()): for k, v in sorted(self.cookies.items()):
result.append(row % (k, v)) result.append(row % (k, v))
result.append("") result.append('')
result.append("Environment:") result.append('Environment:')
for k, v in sorted(self.environ.items()): for k, v in sorted(self.environ.items()):
result.append(row % (k, v)) result.append(row % (k, v))
return "\n".join(result) return '\n'.join(result)
def process_inputs(self): def process_inputs(self):
if self.parsed: if self.parsed:
@ -147,7 +147,7 @@ class HTTPRequest(quixote.http_request.HTTPRequest):
length = int(length) length = int(length)
except ValueError: except ValueError:
raise RequestError('invalid content-length header') raise RequestError('invalid content-length header')
ctype = self.environ.get("CONTENT_TYPE") ctype = self.environ.get('CONTENT_TYPE')
if self.django_request: if self.django_request:
self.stdin = self.django_request self.stdin = self.django_request
if ctype: if ctype:
@ -29,17 +29,17 @@ def list2human(stringlist):
'''Transform a string list to human enumeration''' '''Transform a string list to human enumeration'''
beginning = stringlist[:-1] beginning = stringlist[:-1]
if not beginning: if not beginning:
return "".join(stringlist) return ''.join(stringlist)
return _("%(first)s and %(second)s") % {'first': _(", ").join(beginning), 'second': stringlist[-1]} return _('%(first)s and %(second)s') % {'first': _(', ').join(beginning), 'second': stringlist[-1]}
_humandurations = ( _humandurations = (
((_("day"), _("days")), _day), ((_('day'), _('days')), _day),
((_("hour"), _("hours")), _hour), ((_('hour'), _('hours')), _hour),
((_("month"), _("months")), _month), ((_('month'), _('months')), _month),
((_("year"), _("years")), _year), ((_('year'), _('years')), _year),
((_("minute"), _("minutes")), _minute), ((_('minute'), _('minutes')), _minute),
((_("second"), _("seconds")), 1), ((_('second'), _('seconds')), 1),
) )
@ -58,7 +58,7 @@ def humanduration2seconds(humanduration):
seconds = 0 seconds = 0
for words, quantity in _humandurations: for words, quantity in _humandurations:
for word in words: for word in words:
m = re.search(r"(\d+)\s*\b%s\b" % word, humanduration) m = re.search(r'(\d+)\s*\b%s\b' % word, humanduration)
if m: if m:
seconds = seconds + int(m.group(1)) * quantity seconds = seconds + int(m.group(1)) * quantity
break break
@ -68,7 +68,7 @@ def humanduration2seconds(humanduration):
def seconds2humanduration(seconds, short=False): def seconds2humanduration(seconds, short=False):
"""Convert a time range in seconds to a human string representation""" """Convert a time range in seconds to a human string representation"""
if not isinstance(seconds, int): if not isinstance(seconds, int):
return "" return ''
days = int(seconds / _day) days = int(seconds / _day)
seconds = seconds - _day * days seconds = seconds - _day * days
@ -70,7 +70,7 @@ def get_text_file_preview(filename):
""" """
content = get_file_content(str(filename)) content = get_file_content(str(filename))
if content: if content:
return htmltext("<pre>%s</pre>") % content return htmltext('<pre>%s</pre>') % content
else: else:
return None return None
@ -82,7 +82,7 @@ class MethodDirectory(Directory):
idps = get_cfg('idp', {}) idps = get_cfg('idp', {})
if not lasso: if not lasso:
raise Exception("lasso is missing, idp method cannot be used") raise Exception('lasso is missing, idp method cannot be used')
if len(idps) == 0: if len(idps) == 0:
return template.error_page(_('SSO support is not yet configured')) return template.error_page(_('SSO support is not yet configured'))
@ -23,7 +23,7 @@ from wcs.qommon.publisher import get_publisher_class
class Command(BaseCommand): class Command(BaseCommand):
help = "Collect static files in a single location." help = 'Collect static files in a single location.'
def add_arguments(self, parser): def add_arguments(self, parser):
parser.add_argument( parser.add_argument(
@ -32,8 +32,8 @@ class Command(BaseCommand):
action='store_true', action='store_true',
dest='clear', dest='clear',
default=False, default=False,
help="Clear the existing files using the storage " help='Clear the existing files using the storage '
"before trying to copy or link the original file.", 'before trying to copy or link the original file.',
) )
parser.add_argument( parser.add_argument(
'-l', '-l',
@ -41,7 +41,7 @@ class Command(BaseCommand):
action='store_true', action='store_true',
dest='link', dest='link',
default=False, default=False,
help="Create a symbolic link to each file instead of copying.", help='Create a symbolic link to each file instead of copying.',
) )
def handle(self, **options): def handle(self, **options):
@ -137,9 +137,9 @@ def get_provider_label(provider):
if not organization: if not organization:
return provider.providerId return provider.providerId
name = re.findall("<OrganizationDisplayName.*>(.*?)</OrganizationDisplayName>", organization) name = re.findall('<OrganizationDisplayName.*>(.*?)</OrganizationDisplayName>', organization)
if not name: if not name:
name = re.findall("<OrganizationName.*>(.*?)</OrganizationName>", organization) name = re.findall('<OrganizationName.*>(.*?)</OrganizationName>', organization)
if not name: if not name:
return provider.providerId return provider.providerId
return htmltext(name[0].decode('utf8').encode(get_publisher().site_charset)) return htmltext(name[0].decode('utf8').encode(get_publisher().site_charset))
@ -561,10 +561,10 @@ def get_foreground_colour(background_colour):
def indent_xml(elem, level=0): def indent_xml(elem, level=0):
# in-place prettyprint formatter # in-place prettyprint formatter
# http://effbot.org/zone/element-lib.htm#prettyprint # http://effbot.org/zone/element-lib.htm#prettyprint
i = "\n" + level * " " i = '\n' + level * ' '
if len(elem): if len(elem):
if not elem.text or not elem.text.strip(): if not elem.text or not elem.text.strip():
elem.text = i + " " elem.text = i + ' '
for elem in elem: for elem in elem:
indent_xml(elem, level + 1) indent_xml(elem, level + 1)
if not elem.tail or not elem.tail.strip(): if not elem.tail or not elem.tail.strip():
@ -228,7 +228,7 @@ class QommonPublisher(Publisher):
if limit is None: if limit is None:
if hasattr(sys, 'tracebacklimit'): if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit limit = sys.tracebacklimit
print("Exception:", file=error_file) print('Exception:', file=error_file)
print(" type = '%s', value = '%s'" % (exc_type, exc_value), file=error_file) print(" type = '%s', value = '%s'" % (exc_type, exc_value), file=error_file)
print('', file=error_file) print('', file=error_file)
@ -255,9 +255,9 @@ class QommonPublisher(Publisher):
print(' %5s %s' % (lineno, line.rstrip()), file=error_file) print(' %5s %s' % (lineno, line.rstrip()), file=error_file)
print('', file=error_file) print('', file=error_file)
if locals: if locals:
print(" locals: ", file=error_file) print(' locals: ', file=error_file)
for key, value in locals: for key, value in locals:
print(" %s =" % key, end=' ', file=error_file) print(' %s =' % key, end=' ', file=error_file)
value = safe_filter.cleanse_setting(key, value) value = safe_filter.cleanse_setting(key, value)
try: try:
repr_value = repr(value) repr_value = repr(value)
@ -265,7 +265,7 @@ class QommonPublisher(Publisher):
repr_value = repr_value[:10000] + ' [...]' repr_value = repr_value[:10000] + ' [...]'
print(repr_value, file=error_file) print(repr_value, file=error_file)
except Exception: except Exception:
print("<ERROR WHILE PRINTING VALUE>", file=error_file) print('<ERROR WHILE PRINTING VALUE>', file=error_file)
print('', file=error_file) print('', file=error_file)
frame = frame.f_back frame = frame.f_back
n = n + 1 n = n + 1
@ -813,7 +813,7 @@ class QommonPublisher(Publisher):
'map-bounds-bottom-right' 'map-bounds-bottom-right'
).split(';') ).split(';')
attrs['data-map-attribution'] = self.get_site_option('map-attribution') or _( attrs['data-map-attribution'] = self.get_site_option('map-attribution') or _(
"Map data &copy; " 'Map data &copy; '
"<a href='https://openstreetmap.org'>OpenStreetMap</a> contributors, " "<a href='https://openstreetmap.org'>OpenStreetMap</a> contributors, "
"<a href='http://creativecommons.org/licenses/by-sa/2.0/'>CC-BY-SA</a>" "<a href='http://creativecommons.org/licenses/by-sa/2.0/'>CC-BY-SA</a>"
) )
@ -294,7 +294,7 @@ class Saml2Directory(Directory):
def sso_after_response(self, login): def sso_after_response(self, login):
try: try:
assertion = login.response.assertion[0] assertion = login.response.assertion[0]
last_slash = get_request().get_url().rfind("/") last_slash = get_request().get_url().rfind('/')
if ( if (
assertion.subject.subjectConfirmation.subjectConfirmationData.recipient assertion.subject.subjectConfirmation.subjectConfirmationData.recipient
!= get_cfg('sp', {}).get('saml2_base_url') + get_request().get_url()[last_slash:] != get_cfg('sp', {}).get('saml2_base_url') + get_request().get_url()[last_slash:]
@ -406,7 +406,7 @@ class Saml2Directory(Directory):
response.set_status(303) response.set_status(303)
response.headers['location'] = after_url response.headers['location'] = after_url
response.content_type = 'text/plain' response.content_type = 'text/plain'
return "Your browser should redirect you" return 'Your browser should redirect you'
def assertionConsumerPost(self): def assertionConsumerPost(self):
message = get_field('SAMLResponse') message = get_field('SAMLResponse')
@ -86,7 +86,7 @@ def atomic_write(path, content, async_op=False):
dirname = os.path.dirname(path) dirname = os.path.dirname(path)
fd, temp = tempfile.mkstemp(dir=dirname, prefix='.tmp-' + os.path.basename(path) + '-') fd, temp = tempfile.mkstemp(dir=dirname, prefix='.tmp-' + os.path.basename(path) + '-')
os.fchmod(fd, 0o666 & ~process_umask) os.fchmod(fd, 0o666 & ~process_umask)
f = os.fdopen(fd, "wb") f = os.fdopen(fd, 'wb')
if hasattr(content, 'read'): if hasattr(content, 'read'):
# file pointer # file pointer
def read100k(): def read100k():
@ -35,7 +35,7 @@ import threading
import time import time
import weakref import weakref
__all__ = ["lock_file"] __all__ = ['lock_file']
import fcntl import fcntl
@ -159,7 +159,7 @@ class _LockFile:
def acquire(self): def acquire(self):
if self._file is None: if self._file is None:
self._file = open(self._path, "w") # pylint: disable=consider-using-with self._file = open(self._path, 'w') # pylint: disable=consider-using-with
if self._timeout is None: if self._timeout is None:
_lock_file_blocking(self._file) _lock_file_blocking(self._file)
else: else:
@ -88,13 +88,13 @@ TEMPLATES = [
'APP_DIRS': False, 'APP_DIRS': False,
'OPTIONS': { 'OPTIONS': {
'context_processors': [ 'context_processors': [
"django.template.context_processors.debug", 'django.template.context_processors.debug',
"django.template.context_processors.i18n", 'django.template.context_processors.i18n',
"django.template.context_processors.media", 'django.template.context_processors.media',
"django.template.context_processors.static", 'django.template.context_processors.static',
"django.template.context_processors.tz", 'django.template.context_processors.tz',
"django.contrib.messages.context_processors.messages", 'django.contrib.messages.context_processors.messages',
"wcs.context_processors.publisher", 'wcs.context_processors.publisher',
], ],
'loaders': [ 'loaders': [
'wcs.utils.TemplateLoader', 'wcs.utils.TemplateLoader',
@ -29,17 +29,17 @@ class UnknownUser:
return str(_('unknown user')) return str(_('unknown user'))
def indent(tree, space=" ", level=0): def indent(tree, space=' ', level=0):
# backport from Lib/xml/etree/ElementTree.py python 3.9 # backport from Lib/xml/etree/ElementTree.py python 3.9
if isinstance(tree, ET.ElementTree): if isinstance(tree, ET.ElementTree):
tree = tree.getroot() tree = tree.getroot()
if level < 0: if level < 0:
raise ValueError(f"Initial indentation level must be >= 0, got {level}") raise ValueError(f'Initial indentation level must be >= 0, got {level}')
if len(tree) == 0: if len(tree) == 0:
return return
# Reduce the memory consumption by reusing indentation strings. # Reduce the memory consumption by reusing indentation strings.
indentations = ["\n" + level * space] indentations = ['\n' + level * space]
def _indent_children(elem, level): def _indent_children(elem, level):
# Start a new indentation level for the first child. # Start a new indentation level for the first child.
@ -66,8 +66,8 @@ def indent(tree, space=" ", level=0):
_indent_children(tree, 0) _indent_children(tree, 0)
_no_eol = "\\ No newline at end of file" _no_eol = '\\ No newline at end of file'
_hdr_pat = re.compile(r"^@@ -(\d+),?(\d+)? \+(\d+),?(\d+)? @@$") _hdr_pat = re.compile(r'^@@ -(\d+),?(\d+)? \+(\d+),?(\d+)? @@$')
def make_patch(a, b): def make_patch(a, b):
@ -93,15 +93,15 @@ def apply_patch(s, patch, revert=False):
t = '' t = ''
i = sl = 0 i = sl = 0
(midx, sign) = (1, '+') if not revert else (3, '-') (midx, sign) = (1, '+') if not revert else (3, '-')
while i < len(p) and p[i].startswith(("---", "+++")): while i < len(p) and p[i].startswith(('---', '+++')):
i += 1 # skip header lines i += 1 # skip header lines
while i < len(p): while i < len(p):
m = _hdr_pat.match(p[i]) m = _hdr_pat.match(p[i])
if not m: if not m:
raise Exception("Bad patch -- regex mismatch [line " + str(i) + "]") raise Exception('Bad patch -- regex mismatch [line ' + str(i) + ']')
_l = int(m.group(midx)) - 1 + (m.group(midx + 1) == '0') _l = int(m.group(midx)) - 1 + (m.group(midx + 1) == '0')
if sl > _l or _l > len(s): if sl > _l or _l > len(s):
raise Exception("Bad patch -- bad line num [line " + str(i) + "]") raise Exception('Bad patch -- bad line num [line ' + str(i) + ']')
t += ''.join(s[sl:_l]) t += ''.join(s[sl:_l])
sl = _l sl = _l
i += 1 i += 1
@ -554,20 +554,20 @@ def recreate_trigger(formdef, cur, conn):
# recreate the trigger function, just so it's uptodate # recreate the trigger function, just so it's uptodate
table_name = get_formdef_table_name(formdef) table_name = get_formdef_table_name(formdef)
category_value = formdef.category_id category_value = formdef.category_id
geoloc_base_x_query = "NULL" geoloc_base_x_query = 'NULL'
geoloc_base_y_query = "NULL" geoloc_base_y_query = 'NULL'
if formdef.geolocations and 'base' in formdef.geolocations: if formdef.geolocations and 'base' in formdef.geolocations:
# default geolocation is in the 'base' key; we have to unstructure the # default geolocation is in the 'base' key; we have to unstructure the
# field is the POINT type of postgresql cannot be used directly as it # field is the POINT type of postgresql cannot be used directly as it
# doesn't have an equality operator. # doesn't have an equality operator.
geoloc_base_x_query = "NEW.geoloc_base[0]" geoloc_base_x_query = 'NEW.geoloc_base[0]'
geoloc_base_y_query = "NEW.geoloc_base[1]" geoloc_base_y_query = 'NEW.geoloc_base[1]'
if formdef.category_id is None: if formdef.category_id is None:
category_value = "NULL" category_value = 'NULL'
criticality_levels = len(formdef.workflow.criticality_levels or [0]) criticality_levels = len(formdef.workflow.criticality_levels or [0])
endpoint_status = formdef.workflow.get_endpoint_status() endpoint_status = formdef.workflow.get_endpoint_status()
endpoint_status_filter = ", ".join(["'wf-%s'" % x.id for x in endpoint_status]) endpoint_status_filter = ', '.join(["'wf-%s'" % x.id for x in endpoint_status])
if endpoint_status_filter == "": if endpoint_status_filter == '':
# not the prettiest in town, but will do fine for now. # not the prettiest in town, but will do fine for now.
endpoint_status_filter = "'xxxx'" endpoint_status_filter = "'xxxx'"
formed_name_quotedstring = psycopg2.extensions.QuotedString(formdef.name) formed_name_quotedstring = psycopg2.extensions.QuotedString(formdef.name)
@ -1481,17 +1481,17 @@ def do_global_views(conn, cur):
)""" )"""
) )
cur.execute( cur.execute(
'''CREATE INDEX IF NOT EXISTS %s_fts ON %s USING gin(fts)''' % ("wcs_all_forms", "wcs_all_forms") '''CREATE INDEX IF NOT EXISTS %s_fts ON %s USING gin(fts)''' % ('wcs_all_forms', 'wcs_all_forms')
) )
for attr in ('receipt_time', 'anonymised', 'user_id', 'status'): for attr in ('receipt_time', 'anonymised', 'user_id', 'status'):
cur.execute( cur.execute(
'''CREATE INDEX IF NOT EXISTS %s_%s ON %s (%s)''' % ("wcs_all_forms", attr, "wcs_all_forms", attr) '''CREATE INDEX IF NOT EXISTS %s_%s ON %s (%s)''' % ('wcs_all_forms', attr, 'wcs_all_forms', attr)
) )
for attr in ('concerned_roles_array', 'actions_roles_array'): for attr in ('concerned_roles_array', 'actions_roles_array'):
cur.execute( cur.execute(
'''CREATE INDEX IF NOT EXISTS %s_%s ON %s USING gin (%s)''' '''CREATE INDEX IF NOT EXISTS %s_%s ON %s USING gin (%s)'''
% ("wcs_all_forms", attr, "wcs_all_forms", attr) % ('wcs_all_forms', attr, 'wcs_all_forms', attr)
) )
# make sure the table will not be changed while we work on it # make sure the table will not be changed while we work on it
@ -1551,19 +1551,19 @@ def init_global_table(conn=None, cur=None):
for formdef in FormDef.select(): for formdef in FormDef.select():
category_value = formdef.category_id category_value = formdef.category_id
if formdef.category_id is None: if formdef.category_id is None:
category_value = "NULL" category_value = 'NULL'
geoloc_base_x_query = "NULL" geoloc_base_x_query = 'NULL'
geoloc_base_y_query = "NULL" geoloc_base_y_query = 'NULL'
if formdef.geolocations and 'base' in formdef.geolocations: if formdef.geolocations and 'base' in formdef.geolocations:
# default geolocation is in the 'base' key; we have to unstructure the # default geolocation is in the 'base' key; we have to unstructure the
# field is the POINT type of postgresql cannot be used directly as it # field is the POINT type of postgresql cannot be used directly as it
# doesn't have an equality operator. # doesn't have an equality operator.
geoloc_base_x_query = "geoloc_base[0]" geoloc_base_x_query = 'geoloc_base[0]'
geoloc_base_y_query = "geoloc_base[1]" geoloc_base_y_query = 'geoloc_base[1]'
criticality_levels = len(formdef.workflow.criticality_levels or [0]) criticality_levels = len(formdef.workflow.criticality_levels or [0])
endpoint_status = formdef.workflow.get_endpoint_status() endpoint_status = formdef.workflow.get_endpoint_status()
endpoint_status_filter = ", ".join(["'wf-%s'" % x.id for x in endpoint_status]) endpoint_status_filter = ', '.join(["'wf-%s'" % x.id for x in endpoint_status])
if endpoint_status_filter == "": if endpoint_status_filter == '':
# not the prettiest in town, but will do fine for now. # not the prettiest in town, but will do fine for now.
endpoint_status_filter = "'xxxx'" endpoint_status_filter = "'xxxx'"
formed_name_quotedstring = psycopg2.extensions.QuotedString(formdef.name) formed_name_quotedstring = psycopg2.extensions.QuotedString(formdef.name)
@ -2082,7 +2082,7 @@ class SqlMixin:
elif field.key == 'computed': elif field.key == 'computed':
if not isinstance(value, dict): if not isinstance(value, dict):
raise ValueError( raise ValueError(
"bad data %s (type %s) in computed field %s" % (value, type(value), field.id) 'bad data %s (type %s) in computed field %s' % (value, type(value), field.id)
) )
if value.get('@type') == 'computed-data': if value.get('@type') == 'computed-data':
value = value.get('data') value = value.get('data')
@ -2575,7 +2575,7 @@ class SqlDataMixin(SqlMixin):
value = row[len(cls._table_static_fields) + i] value = row[len(cls._table_static_fields) + i]
if not value: if not value:
continue continue
m = re.match(r"\(([^)]+),([^)]+)\)", value) m = re.match(r'\(([^)]+),([^)]+)\)', value)
o.geolocations[field] = {'lon': float(m.group(1)), 'lat': float(m.group(2))} o.geolocations[field] = {'lon': float(m.group(1)), 'lat': float(m.group(2))}
o.data = cls._row2obdata(row, cls._formdef) o.data = cls._row2obdata(row, cls._formdef)
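The regular expression above parses PostgreSQL's textual rendering of a point column, e.g. '(2.3522,48.8566)'; a minimal sketch (coordinate order follows the storage convention used in this file):
import re

# illustration only: a point column comes back as '(x,y)' text
m = re.match(r'\(([^)]+),([^)]+)\)', '(2.3522,48.8566)')
lon, lat = float(m.group(1)), float(m.group(2))  # 2.3522, 48.8566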
@ -3176,7 +3176,7 @@ class TransientData(SqlMixin):
try: try:
cur.execute(sql_statement, sql_dict) cur.execute(sql_statement, sql_dict)
except psycopg2.IntegrityError as e: except psycopg2.IntegrityError as e:
if "transient_data_session_id_fkey" not in str(e): if 'transient_data_session_id_fkey' not in str(e):
raise raise
conn.commit() conn.commit()
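The except clause above matches the constraint name in the error text; since psycopg2 2.8 the same intent can be expressed with the specific exception class and its diagnostics. A sketch reusing the names from the excerpt, not what the code actually does:
import psycopg2.errors

try:
    cur.execute(sql_statement, sql_dict)
except psycopg2.errors.ForeignKeyViolation as e:
    # ignore only a violation of the session foreign key; re-raise anything else
    if e.diag.constraint_name != 'transient_data_session_id_fkey':
        raise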
@ -5024,7 +5024,7 @@ def get_cron_status():
conn, cur = get_connection_and_cursor() conn, cur = get_connection_and_cursor()
do_meta_table(conn, cur, insert_current_sql_level=False) do_meta_table(conn, cur, insert_current_sql_level=False)
key = 'cron-status-%s' % get_publisher().tenant.hostname key = 'cron-status-%s' % get_publisher().tenant.hostname
cur.execute("SELECT value, updated_at FROM wcs_meta WHERE key = %s", (key,)) cur.execute('SELECT value, updated_at FROM wcs_meta WHERE key = %s', (key,))
row = cur.fetchone() row = cur.fetchone()
conn.commit() conn.commit()
cur.close() cur.close()
@ -5036,7 +5036,7 @@ def get_and_update_cron_status():
conn, cur = get_connection_and_cursor() conn, cur = get_connection_and_cursor()
do_meta_table(conn, cur, insert_current_sql_level=False) do_meta_table(conn, cur, insert_current_sql_level=False)
key = 'cron-status-%s' % get_publisher().tenant.hostname key = 'cron-status-%s' % get_publisher().tenant.hostname
cur.execute("SELECT value, created_at FROM wcs_meta WHERE key = %s FOR UPDATE", (key,)) cur.execute('SELECT value, created_at FROM wcs_meta WHERE key = %s FOR UPDATE', (key,))
row = cur.fetchone() row = cur.fetchone()
timestamp = now() timestamp = now()
if row is None: if row is None:
@ -5065,7 +5065,7 @@ def get_and_update_cron_status():
def mark_cron_status(status): def mark_cron_status(status):
conn, cur = get_connection_and_cursor() conn, cur = get_connection_and_cursor()
key = 'cron-status-%s' % get_publisher().tenant.hostname key = 'cron-status-%s' % get_publisher().tenant.hostname
cur.execute("UPDATE wcs_meta SET value = %s, updated_at = NOW() WHERE key = %s", (status, key)) cur.execute('UPDATE wcs_meta SET value = %s, updated_at = NOW() WHERE key = %s', (status, key))
conn.commit() conn.commit()
cur.close() cur.close()
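The FOR UPDATE above is what serializes concurrent cron runs on the tenant's status row; a standalone sketch of the pattern (the DSN, hostname and 'running' value are assumptions, and the insert-on-first-run branch is omitted):
import psycopg2

conn = psycopg2.connect('dbname=wcs')  # hypothetical DSN
cur = conn.cursor()
key = 'cron-status-tenant.example.net'  # hypothetical tenant hostname
# FOR UPDATE row-locks the entry; a second cron run blocks here until commit
cur.execute('SELECT value FROM wcs_meta WHERE key = %s FOR UPDATE', (key,))
row = cur.fetchone()
if row is not None and row[0] != 'running':
    cur.execute('UPDATE wcs_meta SET value = %s, updated_at = NOW() WHERE key = %s', ('running', key))
conn.commit()  # releases the lock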

View File

@ -122,15 +122,15 @@ class Criteria(wcs.qommon.storage.Criteria):
) )
else: else:
# for none values # for none values
attribute = "COALESCE(%s, ARRAY[]::text[])" % attribute attribute = 'COALESCE(%s, ARRAY[]::text[])' % attribute
if isinstance(self, Between): if isinstance(self, Between):
return "%s(SELECT 1 FROM UNNEST(%s) bb(aa) WHERE aa >= %%(c%s)s AND aa < %%(c%s)s)" % ( return '%s(SELECT 1 FROM UNNEST(%s) bb(aa) WHERE aa >= %%(c%s)s AND aa < %%(c%s)s)' % (
getattr(self, 'sql_exists', 'EXISTS'), getattr(self, 'sql_exists', 'EXISTS'),
attribute, attribute,
id(self.value[0]), id(self.value[0]),
id(self.value[1]), id(self.value[1]),
) )
return "%s(SELECT 1 FROM UNNEST(%s) bb(aa) WHERE aa %s %%(c%s)s)" % ( return '%s(SELECT 1 FROM UNNEST(%s) bb(aa) WHERE aa %s %%(c%s)s)' % (
getattr(self, 'sql_exists', 'EXISTS'), getattr(self, 'sql_exists', 'EXISTS'),
attribute, attribute,
getattr(self, 'sql_op_exists', self.sql_op), getattr(self, 'sql_op_exists', self.sql_op),
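To make the generated fragment concrete, here is a small sketch of how the format string above expands for an equality check on an array column (column name and value are hypothetical; the parameter key is the id() of the value, as in the code):
# illustration only: roughly what this branch renders for an equality check
# on a text-array column, leaving aside which branch is taken and why
attribute = 'COALESCE(%s, ARRAY[]::text[])' % 'concerned_roles_array'
clause = '%s(SELECT 1 FROM UNNEST(%s) bb(aa) WHERE aa %s %%(c%s)s)' % (
    'EXISTS',
    attribute,
    '=',
    id('42'),
)
# clause == "EXISTS(SELECT 1 FROM UNNEST(COALESCE(concerned_roles_array,
#            ARRAY[]::text[])) bb(aa) WHERE aa = %(c...)s)"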
@ -218,7 +218,7 @@ class NotEqual(Criteria):
def as_sql(self): def as_sql(self):
if self.field and getattr(self.field, 'block_field', None): if self.field and getattr(self.field, 'block_field', None):
return super().as_sql() return super().as_sql()
return "(%s is NULL OR %s)" % (self.attribute, super().as_sql()) return '(%s is NULL OR %s)' % (self.attribute, super().as_sql())
class StrictNotEqual(Criteria): class StrictNotEqual(Criteria):

View File

@ -538,12 +538,12 @@ class FormsCountView(RestrictedView):
if group_by == 'channel': if group_by == 'channel':
totals_kwargs['group_by'] = 'submission_channel_new' totals_kwargs['group_by'] = 'submission_channel_new'
totals_kwargs['group_by_clause'] = ( totals_kwargs['group_by_clause'] = (
"CASE " 'CASE '
"WHEN submission_channel IN ('web', '') OR submission_channel IS NULL THEN " "WHEN submission_channel IN ('web', '') OR submission_channel IS NULL THEN "
"CASE WHEN backoffice_submission THEN 'backoffice' ELSE 'web' END " "CASE WHEN backoffice_submission THEN 'backoffice' ELSE 'web' END "
"ELSE submission_channel " 'ELSE submission_channel '
"END " 'END '
"as submission_channel_new, " 'as submission_channel_new, '
) )
group_labels.update(FormData.get_submission_channels()) group_labels.update(FormData.get_submission_channels())
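Since Python concatenates the adjacent string literals above, the resulting group-by clause is the single expression below (reformatted here only for readability):
CASE
    WHEN submission_channel IN ('web', '') OR submission_channel IS NULL THEN
        CASE WHEN backoffice_submission THEN 'backoffice' ELSE 'web' END
    ELSE submission_channel
END as submission_channel_new,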

View File

@ -607,7 +607,7 @@ class CreateFormdataWorkflowStatusItem(WorkflowStatusItem):
src.evolution[-1].add_part( src.evolution[-1].add_part(
JournalAssignationErrorPart( JournalAssignationErrorPart(
_('Failed to attach user (not found: "%s")') % value, _('Failed to attach user (not found: "%s")') % value,
"%s (%s)" % (self.description, self.formdef.name), '%s (%s)' % (self.description, self.formdef.name),
) )
) )
src.store() src.store()

View File

@ -70,7 +70,7 @@ from .qommon.template import Template, TemplateError
from .qommon.upload_storage import PicklableUpload, get_storage_object from .qommon.upload_storage import PicklableUpload, get_storage_object
from .roles import get_user_roles, logged_users_role from .roles import get_user_roles, logged_users_role
if not __name__.startswith('wcs.') and __name__ != "__main__": if not __name__.startswith('wcs.') and __name__ != '__main__':
raise ImportError('Import of workflows module must be absolute (import wcs.workflows)') raise ImportError('Import of workflows module must be absolute (import wcs.workflows)')

View File

@ -5,5 +5,5 @@ import os
from django.core.wsgi import get_wsgi_application from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wcs.settings") os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'wcs.settings')
application = get_wsgi_application() application = get_wsgi_application()

View File

@ -3,7 +3,7 @@
import os import os
import sys import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wcs.settings") os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'wcs.settings')
import wcs.qommon.ctl import wcs.qommon.ctl