misc: use only record_error to record and notify (#55414)

This commit is contained in:
Lauréline Guérin 2021-07-06 16:57:04 +02:00
parent d59cff0a70
commit 8314648278
No known key found for this signature in database
GPG Key ID: 1FAB9B9B4F93D473
21 changed files with 179 additions and 198 deletions

View File

@ -5906,15 +5906,12 @@ def test_item_field_autocomplete_json_source(http_requests, pub, error_email, em
data_source.store()
resp2 = app.get(select2_url + '?q=hell')
assert emails.count() == 1
assert (
emails.get_latest('subject')
== '[ERROR] [DATASOURCE] Exception: Error loading JSON data source (...)'
)
assert emails.get_latest('subject') == '[ERROR] [DATASOURCE] Error loading JSON data source (...)'
if pub.is_using_postgresql():
assert pub.loggederror_class.count() == 1
logged_error = pub.loggederror_class.select()[0]
assert logged_error.workflow_id is None
assert logged_error.summary == '[DATASOURCE] Exception: Error loading JSON data source (...)'
assert logged_error.summary == '[DATASOURCE] Error loading JSON data source (...)'
data_source.notify_on_errors = False
data_source.store()

View File

@ -146,9 +146,7 @@ def test_python_datasource_errors(pub, error_email, http_requests, emails, caplo
assert pub.loggederror_class.count() == 1
logged_error = pub.loggederror_class.select()[0]
assert logged_error.workflow_id is None
assert (
logged_error.summary == "[DATASOURCE] Exception: Failed to eval() Python data source ('foobar')"
)
assert logged_error.summary == "[DATASOURCE] Failed to eval() Python data source ('foobar')"
# expression not iterable
datasource = {'type': 'formula', 'value': '2', 'notify_on_errors': True, 'record_on_errors': True}
@ -158,10 +156,7 @@ def test_python_datasource_errors(pub, error_email, http_requests, emails, caplo
assert pub.loggederror_class.count() == 2
logged_error = pub.loggederror_class.select()[1]
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Python data source ('2') gave a non-iterable result"
)
assert logged_error.summary == "[DATASOURCE] Python data source ('2') gave a non-iterable result"
def test_python_datasource_with_evalutils(pub):
@ -405,7 +400,7 @@ def test_json_datasource_bad_url(pub, error_email, http_requests, emails, caplog
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Error loading JSON data source (error in HTTP request to http://remote.example.net/404 (status: 404))"
== "[DATASOURCE] Error loading JSON data source (error in HTTP request to http://remote.example.net/404 (status: 404))"
)
datasource = {
@ -423,7 +418,7 @@ def test_json_datasource_bad_url(pub, error_email, http_requests, emails, caplog
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))"
== "[DATASOURCE] Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))"
)
datasource = {
@ -438,7 +433,7 @@ def test_json_datasource_bad_url(pub, error_email, http_requests, emails, caplog
assert pub.loggederror_class.count() == 3
logged_error = pub.loggederror_class.select()[2]
assert logged_error.workflow_id is None
assert logged_error.summary == "[DATASOURCE] Exception: Error loading JSON data source (error)"
assert logged_error.summary == "[DATASOURCE] Error loading JSON data source (error)"
datasource = {
'type': 'json',
@ -452,7 +447,7 @@ def test_json_datasource_bad_url(pub, error_email, http_requests, emails, caplog
assert pub.loggederror_class.count() == 4
logged_error = pub.loggederror_class.select()[3]
assert logged_error.workflow_id is None
assert logged_error.summary == "[DATASOURCE] Exception: Error reading JSON data source output (err 1)"
assert logged_error.summary == "[DATASOURCE] Error reading JSON data source output (err 1)"
def test_json_datasource_bad_url_scheme(pub, error_email, emails):
@ -475,7 +470,7 @@ def test_json_datasource_bad_url_scheme(pub, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Error loading JSON data source (invalid scheme in URL foo://bar)"
== "[DATASOURCE] Error loading JSON data source (invalid scheme in URL foo://bar)"
)
datasource = {'type': 'json', 'value': '/bla/blo', 'notify_on_errors': True, 'record_on_errors': True}
@ -488,7 +483,7 @@ def test_json_datasource_bad_url_scheme(pub, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Error loading JSON data source (invalid scheme in URL /bla/blo)"
== "[DATASOURCE] Error loading JSON data source (invalid scheme in URL /bla/blo)"
)
@ -827,7 +822,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Error loading JSON data source (error in HTTP request to http://remote.example.net/404 (status: 404))"
== "[DATASOURCE] Error loading JSON data source (error in HTTP request to http://remote.example.net/404 (status: 404))"
)
datasource = {
@ -845,7 +840,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))"
== "[DATASOURCE] Error reading JSON data source output (Expecting value: line 1 column 1 (char 0))"
)
datasource = {
@ -861,7 +856,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert pub.loggederror_class.count() == 3
logged_error = pub.loggederror_class.select()[2]
assert logged_error.workflow_id is None
assert logged_error.summary == "[DATASOURCE] Exception: Error loading JSON data source (error)"
assert logged_error.summary == "[DATASOURCE] Error loading JSON data source (error)"
datasource = {
'type': 'geojson',
@ -875,7 +870,7 @@ def test_geojson_datasource_bad_url(pub, http_requests, error_email, emails):
assert pub.loggederror_class.count() == 4
logged_error = pub.loggederror_class.select()[3]
assert logged_error.workflow_id is None
assert logged_error.summary == "[DATASOURCE] Exception: Error reading JSON data source output (err 1)"
assert logged_error.summary == "[DATASOURCE] Error reading JSON data source output (err 1)"
def test_geojson_datasource_bad_url_scheme(pub, error_email, emails):
@ -896,7 +891,7 @@ def test_geojson_datasource_bad_url_scheme(pub, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Error loading JSON data source (invalid scheme in URL foo://bar)"
== "[DATASOURCE] Error loading JSON data source (invalid scheme in URL foo://bar)"
)
datasource = {'type': 'geojson', 'value': '/bla/blo', 'notify_on_errors': True, 'record_on_errors': True}
@ -909,7 +904,7 @@ def test_geojson_datasource_bad_url_scheme(pub, error_email, emails):
assert logged_error.workflow_id is None
assert (
logged_error.summary
== "[DATASOURCE] Exception: Error loading JSON data source (invalid scheme in URL /bla/blo)"
== "[DATASOURCE] Error loading JSON data source (invalid scheme in URL /bla/blo)"
)

View File

@ -663,8 +663,9 @@ def test_process_notification_user_provision(pub):
assert not User.select()[0].email
def notify_of_exception(exc_info, context):
raise Exception(exc_info)
def record_error(exception=None, *args, **kwargs):
if exception:
raise exception
def test_process_notification_user_with_errors(pub):
@ -710,15 +711,15 @@ def test_process_notification_user_with_errors(pub):
notification['full'] = False
pub.notify_of_exception = notify_of_exception
pub.record_error = record_error
for key in ('uuid', 'first_name', 'last_name', 'email'):
backup = notification['objects']['data'][0][key]
del notification['objects']['data'][0][key]
with pytest.raises(Exception) as e:
CmdHoboNotify.process_notification(notification)
assert e.value.args[0][0] == ValueError
assert e.value.args[0][1].args == ('invalid user',)
assert e.type == ValueError
assert e.value.args == ('invalid user',)
assert User.count() == 0
notification['objects']['data'][0][key] = backup
@ -726,8 +727,8 @@ def test_process_notification_user_with_errors(pub):
del notification['objects']['data'][0]['uuid']
with pytest.raises(Exception) as e:
CmdHoboNotify.process_notification(notification)
assert e.value.args[0][0] == KeyError
assert e.value.args[0][1].args == ('user without uuid',)
assert e.type == KeyError
assert e.value.args == ('user without uuid',)
def test_process_notification_role_with_errors(pub):

View File

@ -5986,8 +5986,10 @@ def test_create_formdata(two_pubs):
if two_pubs.is_using_postgresql():
errors = two_pubs.loggederror_class.select()
assert len(errors) == 2
assert any('form_var_toto_string' in (error.exception_message or '') for error in errors)
assert any('Missing field' in error.summary for error in errors)
assert 'form_var_toto_string' in errors[0].exception_message
assert 'Missing field' in errors[1].summary
assert errors[0].formdata_id == str(target_formdef.data_class().select()[0].id)
assert errors[1].formdata_id == str(target_formdef.data_class().select()[0].id)
# no tracking code has been created
created_formdata = target_formdef.data_class().select()[0]
@ -6370,6 +6372,7 @@ def test_create_carddata_user_association(two_pubs):
assert two_pubs.loggederror_class.count() == 1
logged_error = two_pubs.loggederror_class.select()[0]
assert logged_error.summary == 'Failed to attach user (not found: "zzz")'
assert logged_error.formdata_id == str(carddef.data_class().select()[0].id)
# user association on invalid template
carddef.data_class().wipe()

View File

@ -220,7 +220,7 @@ def test_webservice_on_error_with_sql(http_requests, emails, notify_on_errors, r
assert 'Foo Bar ' in resp.text
if notify_on_errors:
assert emails.count() == 1
assert "[ERROR] [WSCALL] Exception: %s whatever" % status_code in emails.emails
assert "[ERROR] [WSCALL] %s whatever" % status_code in emails.emails
emails.empty()
else:
assert emails.count() == 0

View File

@ -17,7 +17,6 @@
import datetime
import json
import re
import sys
import time
import urllib.parse
@ -1119,12 +1118,8 @@ class AutocompleteDirectory(Directory):
if 'data_source' in info:
error_summary = 'Error loading JSON data source (%s)' % str(e)
data_source = NamedDataSource.get(info['data_source'])
try:
raise Exception(error_summary) from e
except Exception:
exc_info = sys.exc_info()
get_publisher().notify_of_exception(
exc_info,
get_publisher().record_error(
error_summary,
context='[DATASOURCE]',
notify=data_source.notify_on_errors,
record=data_source.record_on_errors,

View File

@ -175,8 +175,8 @@ class CmdHoboNotify(Command):
if field.convert_value_from_anything:
try:
field_value = field.convert_value_from_anything(field_value)
except ValueError:
publisher.notify_of_exception(sys.exc_info(), context='[PROVISIONNING]')
except ValueError as e:
publisher.record_error(exception=e, context='[PROVISIONNING]', notify=True)
continue
user.form_data[field.id] = field_value
user.name_identifiers = [uuid]
@ -202,8 +202,8 @@ class CmdHoboNotify(Command):
users = User.get_users_with_name_identifier(o['uuid'])
for user in users:
user.set_deleted()
except Exception:
publisher.notify_of_exception(sys.exc_info(), context='[PROVISIONNING]')
except Exception as e:
publisher.record_error(exception=e, context='[PROVISIONNING]', notify=True)
CmdHoboNotify.register()

View File

@ -16,7 +16,6 @@
import collections
import hashlib
import sys
import urllib.parse
import xml.etree.ElementTree as ET
@ -170,7 +169,6 @@ def get_json_from_url(url, data_source=None, log_message_part='JSON data source'
data_key = data_source.get('data_attribute') or 'data'
geojson = data_source.get('type') == 'geojson'
error_summary = None
exc = None
try:
entries = misc.json_loads(misc.urlopen(url).read())
@ -194,18 +192,12 @@ def get_json_from_url(url, data_source=None, log_message_part='JSON data source'
return entries
except misc.ConnectionError as e:
error_summary = 'Error loading %s (%s)' % (log_message_part, str(e))
exc = e
except (ValueError, TypeError) as e:
error_summary = 'Error reading %s output (%s)' % (log_message_part, str(e))
exc = e
if data_source and (data_source.get('record_on_errors') or data_source.get('notify_on_errors')):
try:
raise Exception(error_summary) from exc
except Exception:
exc_info = sys.exc_info()
get_publisher().notify_of_exception(
exc_info,
if data_source:
get_publisher().record_error(
error_summary,
context='[DATASOURCE]',
notify=data_source.get('notify_on_errors'),
record=data_source.get('record_on_errors'),
@ -312,14 +304,8 @@ def get_structured_items(data_source, mode=None):
# noqa pylint: disable=eval-used
value = eval(data_source.get('value'), global_eval_dict, variables)
if not isinstance(value, collections.Iterable):
try:
raise Exception(
'Python data source (%r) gave a non-iterable result' % data_source.get('value')
)
except Exception:
exc_info = sys.exc_info()
get_publisher().notify_of_exception(
exc_info,
get_publisher().record_error(
'Python data source (%r) gave a non-iterable result' % data_source.get('value'),
context='[DATASOURCE]',
notify=data_source.get('notify_on_errors'),
record=data_source.get('record_on_errors'),
@ -340,14 +326,9 @@ def get_structured_items(data_source, mode=None):
return [{'id': x, 'text': x} for x in value]
return value
except Exception as exc:
try:
raise Exception(
'Failed to eval() Python data source (%r)' % data_source.get('value')
) from exc
except Exception:
exc_info = sys.exc_info()
get_publisher().notify_of_exception(
exc_info,
get_publisher().record_error(
'Failed to eval() Python data source (%r)' % data_source.get('value'),
exception=exc,
context='[DATASOURCE]',
notify=data_source.get('notify_on_errors'),
record=data_source.get('record_on_errors'),

View File

@ -94,24 +94,34 @@ class LoggedError:
return error
@classmethod
def record_exception(cls, error_summary, plain_error_msg, publisher):
try:
context = publisher.substitutions.get_context_variables()
except Exception:
return
formdata_id = context.get('form_number_raw')
formdef_urlname = context.get('form_slug')
if formdef_urlname:
klass = FormDef
if context.get('form_class_name') == 'CardDef':
klass = CardDef
formdef = klass.get_by_urlname(formdef_urlname)
formdata = formdef.data_class().get(formdata_id, ignore_errors=True)
workflow = formdef.workflow
else:
formdef = formdata = workflow = None
def record_error(cls, error_summary, plain_error_msg, publisher, *args, **kwargs):
formdef = kwargs.pop('formdef', None)
formdata = kwargs.pop('formdata', None)
workflow = kwargs.pop('workflow', None)
if not any([formdef, formdata, workflow]):
try:
context = publisher.substitutions.get_context_variables()
except Exception:
return
formdata_id = context.get('form_number_raw')
formdef_urlname = context.get('form_slug')
if formdef_urlname:
klass = FormDef
if context.get('form_class_name') == 'CardDef':
klass = CardDef
formdef = klass.get_by_urlname(formdef_urlname)
formdata = formdef.data_class().get(formdata_id, ignore_errors=True)
workflow = formdef.workflow
else:
formdef = formdata = workflow = None
return cls.record(
error_summary, plain_error_msg, formdata=formdata, formdef=formdef, workflow=workflow
error_summary,
plain_error_msg,
formdata=formdata,
formdef=formdef,
workflow=workflow,
*args,
**kwargs,
)
def build_tech_id(self):

View File

@ -14,6 +14,7 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import io
import json
import os
import pickle
@ -350,45 +351,61 @@ class WcsPublisher(QommonPublisher):
conn, cur = sql.get_connection_and_cursor()
sql.drop_views(None, conn, cur)
for formdef in FormDef.select() + CardDef.select():
sql.do_formdef_tables(formdef)
for _formdef in FormDef.select() + CardDef.select():
sql.do_formdef_tables(_formdef)
sql.migrate_global_views(conn, cur)
conn.commit()
cur.close()
def notify_of_exception(self, exc_tuple, context=None, record=True, notify=True):
exc_type, exc_value, tb = exc_tuple
error_summary = traceback.format_exception_only(exc_type, exc_value)
error_summary = error_summary[0][0:-1] # de-listify and strip newline
def record_error(
self, error_summary=None, context=None, exception=None, record=True, notify=False, *args, **kwargs
):
if not record and not notify:
return
if exception is not None:
exc_type, exc_value, tb = sys.exc_info()
if not error_summary:
error_summary = traceback.format_exception_only(exc_type, exc_value)
error_summary = error_summary[0][0:-1] # de-listify and strip newline
plain_error_msg = str(
self._generate_plaintext_error(get_request(), self, exc_type, exc_value, tb)
)
else:
error_file = io.StringIO()
print('Stack trace (most recent call first):', file=error_file)
stack_summary = traceback.extract_stack()
stack_summary.reverse()
traceback.print_list(stack_summary[1:], file=error_file)
if get_request():
error_file.write('\n')
error_file.write(get_request().dump())
error_file.write('\n')
plain_error_msg = error_file.getvalue()
if context:
error_summary = '%s %s' % (context, error_summary)
if error_summary is None:
return
plain_error_msg = str(self._generate_plaintext_error(get_request(), self, exc_type, exc_value, tb))
self.log_internal_error(error_summary, plain_error_msg, record=record, notify=notify)
def log_internal_error(self, error_summary, plain_error_msg, record=False, notify=True):
tech_id = None
logged_exception = None
if record and self.loggederror_class:
logged_exception = self.loggederror_class.record_exception(
error_summary, plain_error_msg, publisher=self
logged_exception = self.loggederror_class.record_error(
error_summary, plain_error_msg, publisher=self, exception=exception, *args, **kwargs
)
if logged_exception:
tech_id = logged_exception.tech_id
if not notify:
if not notify or logged_exception and logged_exception.occurences_count > 1:
# notify only first occurrence
return
try:
self.logger.log_internal_error(error_summary, plain_error_msg, tech_id)
self.logger.log_internal_error(
error_summary, plain_error_msg, logged_exception.tech_id if logged_exception else None
)
except OSError:
# Could happen if there is no mail server available and exceptions
# were configured to be mailed. (formerly socket.error)
# Could also happen on file descriptor exhaustion.
pass
def record_error(self, *args, **kwargs):
if self.loggederror_class:
self.loggederror_class.record(*args, **kwargs)
def apply_global_action_timeouts(self):
from wcs.workflows import Workflow, WorkflowGlobalActionTimeoutTrigger

View File

@ -95,8 +95,8 @@ class AfterJob(StorableObject):
self.execute()
else:
self.job_cmd(job=self)
except Exception:
get_publisher().notify_of_exception(sys.exc_info())
except Exception as e:
get_publisher().record_error(exception=e, notify=True)
self.exception = traceback.format_exc()
self.status = N_('failed')
else:

View File

@ -14,8 +14,6 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import sys
from django.conf import settings
@ -69,5 +67,5 @@ def cron_worker(publisher, now, job_name=None):
publisher.substitutions.feed(extra_source(publisher, None))
try:
job.function(publisher)
except Exception:
publisher.notify_of_exception(sys.exc_info(), context='[CRON]')
except Exception as e:
publisher.record_error(exception=e, context='[CRON]', notify=True)

View File

@ -16,7 +16,6 @@
import base64
import hashlib
import sys
import urllib.parse
import uuid
@ -415,8 +414,8 @@ class FCAuthMethod(AuthMethod):
try:
value = WorkflowStatusItem.compute(value, context=user_info)
except Exception:
get_publisher().notify_of_exception(sys.exc_info(), context='[FC-user-compute]')
except Exception as e:
get_publisher().record_error(exception=e, context='[FC-user-compute]', notify=True)
continue
if field_varname == '__name':
user.name = value

View File

@ -15,7 +15,6 @@
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import urllib.parse
from xml.sax.saxutils import escape
@ -299,8 +298,8 @@ class Saml2Directory(Directory):
!= get_cfg('sp', {}).get('saml2_base_url') + get_request().get_url()[last_slash:]
):
return error_page('SubjectConfirmation Recipient Mismatch')
except Exception:
get_publisher().notify_of_exception(sys.exc_info(), context='[SAML]')
except Exception as e:
get_publisher().record_error(exception=e, context='[SAML]', notify=True)
return error_page('Error checking SubjectConfirmation Recipient')
assertions_dir = os.path.join(get_publisher().app_dir, 'assertions')
@ -339,8 +338,8 @@ class Saml2Directory(Directory):
return error_page('Assertion received too early')
if not_on_or_after and current_time > not_on_or_after:
return error_page('Assertion expired')
except Exception:
get_publisher().notify_of_exception(sys.exc_info(), context='[SAML]')
except Exception as e:
get_publisher().record_error(exception=e, context='[SAML]', notify=True)
return error_page('Error checking Assertion Time')
# TODO: check for unknown conditions
@ -491,8 +490,8 @@ class Saml2Directory(Directory):
if field and field.convert_value_from_anything:
try:
field_value = field.convert_value_from_anything(field_value)
except ValueError:
get_publisher().notify_of_exception(sys.exc_info(), context='[SAML]')
except ValueError as e:
get_publisher().record_error(exception=e, context='[SAML]', notify=True)
continue
if user.form_data.get(field_id) != field_value:
user.form_data[field_id] = field_value

View File

@ -708,10 +708,10 @@ class StorableObject:
with locket.lock_file(objects_dir + '.lock.index'):
try:
self.update_indexes(previous_object_value, relative_object_filename)
except Exception:
except Exception as e:
# something failed, we can't keep using possibly broken indexes, so
# we notify of the bug and remove the indexes
get_publisher().notify_of_exception(sys.exc_info(), context='[STORAGE]')
get_publisher().record_error(exception=e, context='[STORAGE]', notify=True)
self.destroy_indexes()
@classmethod

View File

@ -16,7 +16,6 @@
import datetime
import json
import sys
import time
import urllib.parse
import xml.etree.ElementTree as ET
@ -158,8 +157,8 @@ class UpdateUserProfileStatusItem(WorkflowStatusItem):
if field and field.convert_value_from_anything:
try:
field_value = field.convert_value_from_anything(field_value)
except ValueError:
get_publisher().notify_of_exception(sys.exc_info(), context='[PROFILE]')
except ValueError as e:
get_publisher().record_error(exception=e, context='[PROFILE]', notify=True)
# invalid attribute, do not update it
del new_data[field.varname]
continue
@ -188,8 +187,8 @@ class UpdateUserProfileStatusItem(WorkflowStatusItem):
user_uuid = user.name_identifiers[0]
try:
url = user_ws_url(user_uuid)
except MissingSecret:
get_publisher().notify_of_exception(sys.exc_info(), context='[PROFILE]')
except MissingSecret as e:
get_publisher().record_error(exception=e, context='[PROFILE]', notify=True)
return
payload = new_data.copy()

View File

@ -14,8 +14,6 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import sys
from quixote import get_publisher
from quixote.html import htmltext
@ -121,8 +119,8 @@ class RegisterCommenterWorkflowStatusItem(WorkflowStatusItem):
# needed by AttachmentEvolutionPart.from_upload()
upload.get_file_pointer()
formdata.evolution[-1].add_part(AttachmentEvolutionPart.from_upload(upload, to=to))
except Exception:
get_publisher().notify_of_exception(sys.exc_info(), context='[comment/attachments]')
except Exception as e:
get_publisher().record_error(exception=e, context='[comment/attachments]', notify=True)
continue
def perform(self, formdata):

View File

@ -14,7 +14,6 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import sys
import urllib.parse
from quixote import get_publisher, get_request, get_response
@ -105,8 +104,8 @@ class AddRoleWorkflowStatusItem(WorkflowStatusItem):
user_uuid = user.name_identifiers[0]
try:
url = roles_ws_url(role_uuid, user_uuid)
except MissingSecret:
get_publisher().notify_of_exception(sys.exc_info(), context='[ROLES]')
except MissingSecret as e:
get_publisher().record_error(exception=e, context='[ROLES]', notify=True)
return
def after_job(job=None):
@ -175,8 +174,8 @@ class RemoveRoleWorkflowStatusItem(WorkflowStatusItem):
user_uuid = user.name_identifiers[0]
try:
url = roles_ws_url(role_uuid, user_uuid)
except MissingSecret:
get_publisher().notify_of_exception(sys.exc_info(), context='[ROLES]')
except MissingSecret as e:
get_publisher().record_error(exception=e, context='[ROLES]', notify=True)
return
def after_job(job=None):

View File

@ -392,7 +392,7 @@ class WebserviceCallStatusItem(WorkflowStatusItem):
workflow_data['%s_connection_error' % self.varname] = str(e)
formdata.update_workflow_data(workflow_data)
formdata.store()
self.action_on_error(self.action_on_network_errors, formdata, exc_info=sys.exc_info())
self.action_on_error(self.action_on_network_errors, formdata, exception=e)
return
app_error_code = get_app_error_code(response, data, self.response_type)
@ -452,12 +452,10 @@ class WebserviceCallStatusItem(WorkflowStatusItem):
if self.response_type == 'json':
try:
d = json_loads(force_text(data))
except (ValueError, TypeError):
except (ValueError, TypeError) as e:
formdata.update_workflow_data(workflow_data)
formdata.store()
self.action_on_error(
self.action_on_bad_data, formdata, response, data=data, exc_info=sys.exc_info()
)
self.action_on_error(self.action_on_bad_data, formdata, response, data=data, exception=e)
else:
workflow_data['%s_response' % self.varname] = d
if isinstance(d, dict) and self.method == 'POST':
@ -480,25 +478,25 @@ class WebserviceCallStatusItem(WorkflowStatusItem):
)
formdata.evolution[-1].add_part(attachment)
def action_on_error(self, action, formdata, response=None, data=None, exc_info=None):
def action_on_error(self, action, formdata, response=None, data=None, exception=None):
if action in (':pass', ':stop') and (
self.notify_on_errors or self.record_on_errors or self.record_errors
):
if exc_info:
summary = traceback.format_exception_only(exc_info[0], exc_info[1])[-1]
else:
if exception is None:
summary = '<no response>'
if response is not None:
summary = '%s %s' % (response.status_code, response.reason)
try:
raise Exception(summary)
except Exception:
exc_info = sys.exc_info()
else:
exc_type, exc_value = sys.exc_info()[:2]
summary = traceback.format_exception_only(exc_type, exc_value)[-1]
if self.notify_on_errors or self.record_on_errors:
get_publisher().notify_of_exception(
exc_info, context='[WSCALL]', notify=self.notify_on_errors, record=self.record_on_errors
)
get_publisher().record_error(
error_summary=summary,
exception=exception,
context='[WSCALL]',
notify=self.notify_on_errors,
record=self.record_on_errors,
)
if self.record_errors and formdata.evolution:
formdata.evolution[-1].add_part(JournalWsCallErrorPart(summary, self.label, data))
formdata.store()
@ -510,15 +508,15 @@ class WebserviceCallStatusItem(WorkflowStatusItem):
# verify that target still exist
try:
self.parent.parent.get_status(action)
except KeyError:
try:
raise IndexError(
'reference to invalid status %r in workflow %r, status %r'
% (action, self.parent.parent.name, self.parent.name)
)
except IndexError:
get_publisher().notify_of_exception(sys.exc_info(), context='[WSCALL]')
raise AbortActionException()
except KeyError as e:
get_publisher().record_error(
'reference to invalid status %r in workflow %r, status %r'
% (action, self.parent.parent.name, self.parent.name),
exception=e,
context='[WSCALL]',
notify=True,
)
raise AbortActionException()
formdata.status = 'wf-%s' % action
formdata.store()

View File

@ -21,7 +21,6 @@ import datetime
import itertools
import os
import random
import sys
import time
import uuid
import xml.etree.ElementTree as ET
@ -1352,13 +1351,13 @@ class WorkflowGlobalActionTimeoutTrigger(WorkflowGlobalActionTrigger):
try:
# noqa pylint: disable=eval-used
anchor_date = eval(self.anchor_expression, get_publisher().get_global_eval_dict(), variables)
except Exception:
except Exception as e:
# get the variables in the locals() namespace so they are
# displayed within the trace.
expression = self.anchor_expression # noqa pylint: disable=unused-variable
# noqa pylint: disable=unused-variable
global_variables = get_publisher().get_global_eval_dict()
get_publisher().notify_of_exception(sys.exc_info(), context='[TIMEOUTS]')
get_publisher().record_error(exception=e, context='[TIMEOUTS]', notify=True)
# convert anchor_date to datetime.datetime()
if isinstance(anchor_date, datetime.datetime):
@ -1372,15 +1371,15 @@ class WorkflowGlobalActionTimeoutTrigger(WorkflowGlobalActionTrigger):
elif isinstance(anchor_date, str) and anchor_date:
try:
anchor_date = get_as_datetime(anchor_date)
except ValueError:
get_publisher().notify_of_exception(sys.exc_info(), context='[TIMEOUTS]')
except ValueError as e:
get_publisher().record_error(exception=e, context='[TIMEOUTS]', notify=True)
anchor_date = None
elif anchor_date:
# timestamp
try:
anchor_date = datetime.datetime.fromtimestamp(anchor_date)
except TypeError:
get_publisher().notify_of_exception(sys.exc_info(), context='[TIMEOUTS]')
except TypeError as e:
get_publisher().record_error(exception=e, context='[TIMEOUTS]', notify=True)
anchor_date = None
if not anchor_date:
@ -2363,8 +2362,8 @@ class WorkflowStatusItem(XmlSerialisable):
try:
attachment = WorkflowStatusItem.compute(attachment, allow_complex=True, raises=True)
except Exception:
get_publisher().notify_of_exception(sys.exc_info(), context='[workflow/attachments]')
except Exception as e:
get_publisher().record_error(exception=e, context='[workflow/attachments]', notify=True)
else:
if attachment:
complex_value = get_publisher().get_cached_complex_data(attachment)
@ -2385,8 +2384,8 @@ class WorkflowStatusItem(XmlSerialisable):
# and magically convert string like 'form_var_*_raw' to a PicklableUpload
# noqa pylint: disable=eval-used
picklableupload = eval(attachment, global_eval_dict, local_eval_dict)
except Exception:
get_publisher().notify_of_exception(sys.exc_info(), context='[workflow/attachments]')
except Exception as e:
get_publisher().record_error(exception=e, context='[workflow/attachments]', notify=True)
continue
if not picklableupload:
@ -2400,8 +2399,8 @@ class WorkflowStatusItem(XmlSerialisable):
if not isinstance(upload, PicklableUpload):
try:
upload = FileField.convert_value_from_anything(upload)
except ValueError:
get_publisher().notify_of_exception(sys.exc_info(), context='[workflow/attachments]')
except ValueError as e:
get_publisher().record_error(exception=e, context='[workflow/attachments]', notify=True)
continue
yield upload

View File

@ -16,7 +16,6 @@
import collections
import json
import sys
import urllib.parse
import xml.etree.ElementTree as ET
@ -99,8 +98,8 @@ def call_webservice(
try:
value = WorkflowStatusItem.compute(value, raises=True)
value = str(value)
except Exception:
get_publisher().notify_of_exception(sys.exc_info())
except Exception as e:
get_publisher().record_error(exception=e, notify=True)
else:
key = force_str(key)
value = force_str(value)
@ -130,8 +129,8 @@ def call_webservice(
for (key, value) in post_data.items():
try:
payload[key] = WorkflowStatusItem.compute(value, allow_complex=True, raises=True)
except Exception:
get_publisher().notify_of_exception(sys.exc_info())
except Exception as e:
get_publisher().record_error(exception=e, notify=True)
else:
if payload[key]:
payload[key] = get_publisher().get_cached_complex_data(payload[key])
@ -163,11 +162,9 @@ def call_webservice(
except ConnectionError as e:
if not handle_connection_errors:
raise e
if notify_on_errors or record_on_errors:
exc_info = sys.exc_info()
get_publisher().notify_of_exception(
exc_info, context='[WSCALL]', notify=notify_on_errors, record=record_on_errors
)
get_publisher().record_error(
exception=e, context='[WSCALL]', notify=notify_on_errors, record=record_on_errors
)
return (None, None, None)
app_error_code = get_app_error_code(response, data, 'json')
@ -176,12 +173,8 @@ def call_webservice(
summary = '<no response>'
if response is not None:
summary = '%s %s' % (status, response.reason)
try:
raise Exception(summary)
except Exception:
exc_info = sys.exc_info()
get_publisher().notify_of_exception(
exc_info, context='[WSCALL]', notify=notify_on_errors, record=record_on_errors
get_publisher().record_error(
summary, context='[WSCALL]', notify=notify_on_errors, record=record_on_errors
)
return (response, status, data)