From ba5d718042a89dfb362956ea795623539a340dbc Mon Sep 17 00:00:00 2001
From: Benjamin Dauvergne
Date: Fri, 4 Jul 2014 00:36:23 +0200
Subject: [PATCH] Improve error reporting in data source renderer when
 TEMPLATE_DEBUG is True (fixes #5092)

This commit also normalizes the logging level to warning for non-critical
errors.
---
 src/cmsplugin_blurp/renderers/data_source.py | 109 ++++++++++---------
 1 file changed, 59 insertions(+), 50 deletions(-)

diff --git a/src/cmsplugin_blurp/renderers/data_source.py b/src/cmsplugin_blurp/renderers/data_source.py
index cf2a8b2..4bbf38b 100644
--- a/src/cmsplugin_blurp/renderers/data_source.py
+++ b/src/cmsplugin_blurp/renderers/data_source.py
@@ -9,6 +9,7 @@
 import requests
 from requests.exceptions import RequestException, HTTPError, Timeout
 from django.core.cache import cache
+from django.conf import settings
 
 from . import signature, template
 
@@ -170,44 +171,64 @@ class Data(object):
                     timeout=self.timeout,
                     stream=True)
             request.raise_for_status()
-            return request.raw
+            return request.raw, None
         except HTTPError:
-            log.warning('HTTP Error %s when loading URL %s for renderer %r',
+            error = 'HTTP Error %s when loading URL %s for renderer %r' % (
                     request.status_code,
                     self.final_url,
                     self.slug)
+            log.warning(error)
         except Timeout:
-            log.warning('HTTP Request timeout(%s s) when loading URL '
-                        '%s for renderer %s',
-                    self.timeout,
-                    self.final_url,
-                    self.slug)
+            error = 'HTTP Request timeout(%s s) when loading URL ' \
+                    '%s for renderer %s' % (
+                        self.timeout,
+                        self.final_url,
+                        self.slug)
+            log.warning(error)
         except RequestException:
-            log.warning('HTTP Request failed when loading URL '
-                        '%s for renderer %r',
-                    self.final_url,
-                    self.slug)
+            error = 'HTTP Request failed when loading URL ' \
+                    '%s for renderer %r' % (
+                        self.final_url,
+                        self.slug)
+            log.warning(error)
+        return None, error
 
     def resolve_file_url(self):
         path = self.url[7:]
         try:
             return file(path), None
         except Exception:
-            log.exception('unable to resolve file URL: %r', self.url)
+            error = 'unable to resolve file URL: %r' % self.url
+            log.warning(error)
+            return None, error
+
     def update_content(self):
         content = None
         if self.url.startswith('http'):
-            stream = self.resolve_http_url()
+            stream, error = self.resolve_http_url()
         elif self.url.startswith('file:'):
-            stream = self.resolve_file_url()
+            stream, error = self.resolve_file_url()
         else:
-            log.error('unknown scheme: %r', self.url)
+            msg = 'unknown scheme: %r' % self.url
+            log.error(msg)
+            if settings.TEMPLATE_DEBUG:
+                return msg
             return
         if stream is None:
+            log.error(error)
+            if settings.TEMPLATE_DEBUG:
+                return error
             return
-        data = getattr(self, 'parse_'+self.parser_type)(stream)
+        try:
+            data = getattr(self, 'parse_'+self.parser_type)(stream)
+        except Exception:
+            msg = 'error parsing %s content on %s' % (self.parser_type, self.url)
+            log.exception(msg)
+            if settings.TEMPLATE_DEBUG:
+                return msg
+            return None
         if self.refresh and content is not None:
             cache.set(self.key, (data, self.now+self.refresh), 86400*12)
         log.debug('finished')
@@ -256,55 +277,43 @@ class Data(object):
 
     def parse_json(self, stream):
         import json
-        try:
-            return json.load(stream)
-        except ValueError, e:
-            log.exception('unparsable JSON content %s', e)
+        return json.load(stream)
 
     def parse_rss(self, stream):
-        try:
-            result = feedparser.parse(stream.read())
-            entries = result.entries
-            entries = sorted(result.entries, key=lambda e: e['updated_parsed'])
-            result.entries = entries[:self.limit]
-            return result
-        except Exception, e:
-            log.exception('unparsable RSS content %s', e)
+        result = feedparser.parse(stream.read())
+        entries = result.entries
+        entries = sorted(result.entries, key=lambda e: e['updated_parsed'])
+        result.entries = entries[:self.limit]
+        return result
 
     def parse_raw(self, stream):
         return stream.read()
 
     def parse_xml(self, stream):
-        try:
-            return ET.fromstring(stream.read())
-        except Exception, e:
-            log.exception('unparsable XML content', e)
+        return ET.fromstring(stream.read())
 
     def parse_csv(self, stream):
         import csv
-        try:
-            params = self.source.get('csv_params', {})
-            encoding = self.source.get('csv_encoding', 'utf-8')
+        params = self.source.get('csv_params', {})
+        encoding = self.source.get('csv_encoding', 'utf-8')
 
-            def list_decode(l):
-                return map(lambda s: s.decode(encoding), l)
+        def list_decode(l):
+            return map(lambda s: s.decode(encoding), l)
 
-            def dict_decode(d):
-                return dict((a, b.decode(encoding)) for a, b in d.iteritems())
+        def dict_decode(d):
+            return dict((a, b.decode(encoding)) for a, b in d.iteritems())
 
-            if hasattr(stream, 'iter_lines'):
-                stream = stream.iter_lines()
+        if hasattr(stream, 'iter_lines'):
+            stream = stream.iter_lines()
 
-            if 'fieldnames' in params:
-                reader = csv.DictReader(stream, **params)
-                decoder = dict_decode
-            else:
-                reader = csv.reader(stream, **params)
-                decoder = list_decode
-            return list(decoder(e) for e in reader)
-        except Exception, e:
-            log.exception('unparsable CSV content')
+        if 'fieldnames' in params:
+            reader = csv.DictReader(stream, **params)
+            decoder = dict_decode
+        else:
+            reader = csv.reader(stream, **params)
+            decoder = list_decode
+        return list(decoder(e) for e in reader)
 
     def __call__(self):
         return self.get_content()
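
Reviewer note, not part of the patch: the sketch below is a minimal, self-contained illustration of the pattern the patch introduces, where resolvers return a (value, error) pair and the caller surfaces the error text only when template debugging is enabled, otherwise it just logs. The names fetch_stream, load_data and DEBUG are hypothetical stand-ins and not part of the cmsplugin_blurp API; DEBUG stands in for settings.TEMPLATE_DEBUG.

    # Illustrative sketch only; mirrors the (value, error) convention of
    # resolve_http_url()/resolve_file_url() and the TEMPLATE_DEBUG handling
    # in update_content(). All names here are hypothetical.
    import logging

    import requests
    from requests.exceptions import RequestException

    log = logging.getLogger(__name__)

    DEBUG = True  # stand-in for settings.TEMPLATE_DEBUG


    def fetch_stream(url, timeout=10):
        """Return a (stream, error) pair instead of raising."""
        try:
            response = requests.get(url, timeout=timeout, stream=True)
            response.raise_for_status()
            return response.raw, None
        except RequestException as e:
            error = 'failed to load %r: %s' % (url, e)
            log.warning(error)
            return None, error


    def load_data(url):
        stream, error = fetch_stream(url)
        if stream is None:
            log.error(error)
            # With debugging on, return the error text so it shows up where
            # the rendered data would have appeared; otherwise return nothing.
            return error if DEBUG else None
        return stream.read()

Returning the message instead of raising keeps production pages rendering when a data source fails, while still making the failure visible in the output during template debugging.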