452 lines
16 KiB
Python
452 lines
16 KiB
Python
# w.c.s. - web application for online forms
|
|
# Copyright (C) 2005-2010 Entr'ouvert
|
|
#
|
|
# This program is free software; you can redistribute it and/or modify
|
|
# it under the terms of the GNU General Public License as published by
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
# (at your option) any later version.
|
|
#
|
|
# This program is distributed in the hope that it will be useful,
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
# GNU General Public License for more details.
|
|
#
|
|
# You should have received a copy of the GNU General Public License
|
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
|
|
|
import json
|
|
import os
|
|
import pickle
|
|
import re
|
|
import sys
|
|
import traceback
|
|
import zipfile
|
|
from contextlib import contextmanager
|
|
|
|
from django.utils.encoding import force_text
|
|
|
|
from wcs.qommon import force_str
|
|
|
|
from . import custom_views, data_sources, formdef, sessions
|
|
from .admin import RootDirectory as AdminRootDirectory
|
|
from .backoffice import RootDirectory as BackofficeRootDirectory
|
|
from .Defaults import * # noqa pylint: disable=wildcard-import
|
|
from .qommon.cron import CronJob
|
|
from .qommon.publisher import QommonPublisher, get_request, set_publisher_class
|
|
from .roles import Role
|
|
from .root import RootDirectory
|
|
from .tracking_code import TrackingCode
|
|
from .users import User
|
|
|
|
try:
|
|
from .wcs_cfg import * # noqa pylint: disable=wildcard-import
|
|
except ImportError:
|
|
pass
|
|
|
|
|
|
class UnpicklerClass(pickle.Unpickler):
    """Unpickler resolving legacy module and class names to their current wcs locations."""

    def find_class(self, module, name):
        """Import *module* (after legacy-path rewriting) and return its *name* attribute.

        Pickles written before the code moved under the ``wcs`` package
        reference old top-level module paths; those are rewritten first.
        Classes living in ``wcs.formdef`` / ``wcs.carddef`` (other than the
        def classes themselves) are looked up under a ``_wcs_`` prefix.
        """
        # Rewrite module paths recorded by older pickles.
        if module == 'qommon.form':
            module = 'wcs.qommon.form'
        elif module in ('formdata', 'formdef', 'roles', 'users', 'workflows'):
            module = 'wcs.%s' % module

        __import__(module)
        target_module = sys.modules[module]

        # Only FormDef/CardDef keep their plain name in their own module;
        # everything else there is exposed with a '_wcs_' prefix.
        exempt_name = {'wcs.formdef': 'FormDef', 'wcs.carddef': 'CardDef'}.get(module)
        if exempt_name is not None and name != exempt_name and not name.startswith('_wcs_'):
            name = '_wcs_%s' % name

        return getattr(target_module, name)
|
|
|
|
|
|
class WcsPublisher(QommonPublisher):
    """w.c.s. application publisher.

    Specializes QommonPublisher with the wcs storage classes (users, roles,
    tracking codes, sessions, ...), switching between pickle-file storage and
    SQL storage depending on site configuration, and provides site-level
    maintenance entry points (SQL init/migration, zip import, error logging).
    """

    APP_NAME = 'wcs'
    # defaults below come from .Defaults (possibly overridden by .wcs_cfg),
    # and may be overridden again by the configuration file in configure()
    APP_DIR = APP_DIR
    DATA_DIR = DATA_DIR
    ERROR_LOG = ERROR_LOG
    USE_LONG_TRACES = USE_LONG_TRACES
    missing_appdir_redirect = REDIRECT_ON_UNKNOWN_VHOST

    supported_languages = ['fr', 'es', 'de']

    root_directory_class = RootDirectory
    backoffice_directory_class = BackofficeRootDirectory
    admin_directory_class = AdminRootDirectory

    # set in set_config(); None until then
    session_manager_class = None
    # pickle-backed defaults; replaced by SQL classes in set_config() when
    # PostgreSQL is enabled
    user_class = User
    tracking_code_class = TrackingCode
    unpickler_class = UnpicklerClass

    # temporary cache used by the complex_data() context manager; None means
    # the caching mechanism is disabled
    complex_data_cache = None

    @classmethod
    def get_backoffice_module(cls):
        """Return the wcs backoffice module (imported lazily)."""
        from wcs import backoffice

        return backoffice

    @classmethod
    def get_admin_module(cls):
        """Return the wcs admin module (imported lazily)."""
        from wcs import admin

        return admin

    @classmethod
    def configure(cls, config):
        """Apply settings from *config* (a ConfigParser-like object).

        Registers every directory listed in the [extra] section and overrides
        the class-level path/logging defaults from the [main] section.
        """
        if config.has_section('extra'):
            for dummy, directory in config.items('extra'):
                cls.register_extra_dir(directory)
        if config.has_option("main", "app_dir"):
            cls.APP_DIR = config.get("main", "app_dir")
        if config.has_option("main", "data_dir"):
            cls.DATA_DIR = config.get("main", "data_dir")
        if config.has_option("main", "error_log"):
            cls.ERROR_LOG = config.get("main", "error_log")
        if config.has_option("main", "use_long_traces"):
            cls.USE_LONG_TRACES = config.getboolean("main", "use_long_traces")
        if config.has_option("main", "missing_appdir_redirect"):
            cls.missing_appdir_redirect = config.get("main", "missing_appdir_redirect")

    @classmethod
    def register_cronjobs(cls):
        """Register wcs-specific cron jobs on top of the qommon ones."""
        super(WcsPublisher, cls).register_cronjobs()
        # every hour: check for global action timeouts
        cls.register_cronjob(
            CronJob(cls.apply_global_action_timeouts, name='evaluate_global_action_timeouts', minutes=[0])
        )
        data_sources.register_cronjob()
        formdef.register_cronjobs()

    def is_using_postgresql(self):
        """Return True when the site both enables and configures PostgreSQL."""
        return bool(self.has_site_option('postgresql') and self.cfg.get('postgresql', {}))

    def set_config(self, request=None, skip_sql=False):
        """Load site configuration and select storage backends accordingly.

        When PostgreSQL is configured (and skip_sql is False) the SQL-backed
        classes are installed and a fresh connection is opened; otherwise the
        pickle-backed classes are used (with no snapshot/logged-error support).
        """
        QommonPublisher.set_config(self, request=request)
        if request:
            request.response.charset = self.site_charset

        # make sure permissions are set using strings
        if self.cfg.get('admin-permissions'):
            for key in self.cfg['admin-permissions'].keys():
                if not self.cfg['admin-permissions'][key]:
                    continue
                self.cfg['admin-permissions'][key] = [str(x) for x in self.cfg['admin-permissions'][key]]

        # imported here to avoid a circular import at module load time
        import wcs.workflows

        wcs.workflows.load_extra()

        if self.is_using_postgresql() and not skip_sql:
            from . import sql

            self.user_class = sql.SqlUser
            self.role_class = sql.Role
            self.tracking_code_class = sql.TrackingCode
            self.session_class = sql.Session
            self.custom_view_class = sql.CustomView
            self.snapshot_class = sql.Snapshot
            self.loggederror_class = sql.LoggedError
            sql.get_connection(new=True)
        else:
            self.user_class = User
            self.role_class = Role
            self.tracking_code_class = TrackingCode
            self.session_class = sessions.BasicSession
            self.custom_view_class = custom_views.CustomView
            # snapshots and logged errors are only available with SQL storage
            self.snapshot_class = None
            self.loggederror_class = None

        self.session_manager_class = sessions.StorageSessionManager
        self.set_session_manager(self.session_manager_class(session_class=self.session_class))

    def import_zip(self, fd):
        """Import a site export read from *fd* (a zip file object).

        Plain entries are copied into the application directory (config.pck /
        config.json replace the current configuration, preserving the 'sp'
        entry); XML entries are imported in dependency order: blocks, then
        workflows, then forms/cards, then roles.  Returns a dict mapping
        object categories to the number of imported items.
        """
        z = zipfile.ZipFile(fd)
        results = {
            'formdefs': 0,
            'carddefs': 0,
            'workflows': 0,
            'categories': 0,
            'roles': 0,
            'settings': 0,
            'datasources': 0,
            'wscalls': 0,
            'mail-templates': 0,
            'blockdefs': 0,
            'apiaccess': 0,
        }

        # helpers to turn decoded JSON structures into native str recursively
        def _decode_list(data):
            rv = []
            for item in data:
                if isinstance(item, str):
                    item = force_str(item)
                elif isinstance(item, list):
                    item = _decode_list(item)
                elif isinstance(item, dict):
                    item = _decode_dict(item)
                rv.append(item)
            return rv

        def _decode_dict(data):
            rv = {}
            for key, value in data.items():
                key = force_str(key)
                if isinstance(value, str):
                    value = force_str(value)
                elif isinstance(value, list):
                    value = _decode_list(value)
                elif isinstance(value, dict):
                    value = _decode_dict(value)
                rv[key] = value
            return rv

        # first pass: raw files (settings, categories, datasources, ...);
        # *_xml entries are handled by the later passes
        for f in z.namelist():
            if f in ('.indexes', '.max_id'):
                continue
            if os.path.dirname(f) in (
                'formdefs_xml',
                'carddefs_xml',
                'workflows_xml',
                'blockdefs_xml',
                'roles_xml',
            ):
                continue
            path = os.path.join(self.app_dir, f)
            if not os.path.exists(os.path.dirname(path)):
                os.mkdir(os.path.dirname(path))
            if not os.path.basename(f):
                # skip directories
                continue
            data = z.read(f)
            if f in ('config.pck', 'config.json'):
                results['settings'] = 1
                # NOTE(review): unpickling archive content; only import
                # archives from trusted sources
                if f == 'config.pck':
                    d = pickle.loads(data)
                else:
                    d = json.loads(force_text(data), object_hook=_decode_dict)
                # keep the current service-provider ('sp') settings over the
                # imported ones
                if 'sp' in self.cfg:
                    current_sp = self.cfg['sp']
                else:
                    current_sp = None
                self.cfg = d
                if current_sp:
                    self.cfg['sp'] = current_sp
                elif 'sp' in self.cfg:
                    del self.cfg['sp']
                self.write_cfg()
                continue
            open(path, 'wb').write(data)
            if os.path.split(f)[0] in results:
                results[os.path.split(f)[0]] += 1

        # second pass, fields blocks
        from wcs.blocks import BlockDef

        for f in z.namelist():
            if os.path.dirname(f) == 'blockdefs_xml' and os.path.basename(f):
                blockdef = BlockDef.import_from_xml(z.open(f), include_id=True)
                blockdef.store()
                results['blockdefs'] += 1

        # third pass, workflows
        from wcs.workflows import Workflow

        for f in z.namelist():
            if os.path.dirname(f) == 'workflows_xml' and os.path.basename(f):
                workflow = Workflow.import_from_xml(z.open(f), include_id=True, check_datasources=False)
                workflow.store()
                results['workflows'] += 1

        # fourth pass, forms and cards
        from wcs.carddef import CardDef
        from wcs.formdef import FormDef

        formdefs = []
        carddefs = []
        for f in z.namelist():
            if os.path.dirname(f) == 'formdefs_xml' and os.path.basename(f):
                formdef = FormDef.import_from_xml(z.open(f), include_id=True, check_datasources=False)
                formdef.store()
                formdefs.append(formdef)
                results['formdefs'] += 1
            if os.path.dirname(f) == 'carddefs_xml' and os.path.basename(f):
                carddef = CardDef.import_from_xml(z.open(f), include_id=True, check_datasources=False)
                carddef.store()
                carddefs.append(carddef)
                results['carddefs'] += 1

        # fifth pass, roles
        roles = []
        for f in z.namelist():
            if os.path.dirname(f) == 'roles_xml' and os.path.basename(f):
                role = self.role_class.import_from_xml(z.open(f), include_id=True)
                role.store()
                roles.append(role)
                results['roles'] += 1

        # rebuild indexes for imported objects
        for k, v in results.items():
            if k == 'settings':
                continue
            if v == 0:
                continue
            klass = None
            if k == 'formdefs':
                from .formdef import FormDef

                klass = FormDef
            elif k == 'carddefs':
                from .carddef import CardDef

                klass = CardDef
            elif k == 'blockdefs':
                klass = BlockDef
            elif k == 'categories':
                from .categories import Category

                klass = Category
            elif k == 'roles':
                klass = self.role_class
            elif k == 'workflows':
                klass = Workflow
            if klass:
                klass.rebuild_indexes()

            if k == 'formdefs':
                # in case of formdefs, we store them anew in case SQL changes
                # are required.
                for formdef in formdefs or FormDef.select():
                    formdef.store()
            elif k == 'carddefs':
                # ditto for cards
                for carddef in carddefs or CardDef.select():
                    carddef.store()

        z.close()
        return results

    def initialize_sql(self):
        """Create/refresh all SQL tables and per-formdef tables and views."""
        from . import sql

        sql.get_connection(new=True)
        sql.do_session_table()
        sql.do_user_table()
        sql.do_role_table()
        sql.do_tracking_code_table()
        sql.do_custom_views_table()
        sql.do_snapshots_table()
        sql.do_loggederrors_table()
        sql.do_meta_table()
        from .carddef import CardDef
        from .formdef import FormDef

        conn, cur = sql.get_connection_and_cursor()
        # views are dropped then recreated by do_formdef_tables/migrate below
        sql.drop_views(None, conn, cur)
        for formdef in FormDef.select() + CardDef.select():
            sql.do_formdef_tables(formdef)
        sql.migrate_global_views(conn, cur)
        conn.commit()
        cur.close()

    def notify_of_exception(self, exc_tuple, context=None, record=True, notify=True):
        """Record and/or notify about an exception given as (type, value, tb)."""
        exc_type, exc_value, tb = exc_tuple
        error_summary = traceback.format_exception_only(exc_type, exc_value)
        error_summary = error_summary[0][0:-1]  # de-listify and strip newline
        if context:
            error_summary = '%s %s' % (context, error_summary)

        plain_error_msg = str(self._generate_plaintext_error(get_request(), self, exc_type, exc_value, tb))

        self.log_internal_error(error_summary, plain_error_msg, record=record, notify=notify)

    def log_internal_error(self, error_summary, plain_error_msg, record=False, notify=True):
        """Store an internal error (if *record* and SQL storage allows it) and
        forward it to the logger (unless *notify* is False)."""
        tech_id = None
        if record and self.loggederror_class:
            logged_exception = self.loggederror_class.record_exception(
                error_summary, plain_error_msg, publisher=self
            )
            if logged_exception:
                tech_id = logged_exception.tech_id
        if not notify:
            return
        try:
            self.logger.log_internal_error(error_summary, plain_error_msg, tech_id)
        except OSError:
            # Could happen if there is no mail server available and exceptions
            # were configured to be mailed. (formerly socket.error)
            # Could also could happen on file descriptor exhaustion.
            pass

    def record_error(self, *args, **kwargs):
        """Record an error via the logged-error class (no-op without SQL storage)."""
        if self.loggederror_class:
            self.loggederror_class.record(*args, **kwargs)

    def apply_global_action_timeouts(self):
        """Cron entry point: evaluate global action timeout triggers of all workflows."""
        from wcs.workflows import Workflow, WorkflowGlobalActionTimeoutTrigger

        for workflow in Workflow.select():
            WorkflowGlobalActionTimeoutTrigger.apply(workflow)

    def migrate_sql(self):
        """Run pending SQL schema migrations."""
        from . import sql

        sql.migrate()

    def reindex_sql(self):
        """Rebuild SQL indexes."""
        from . import sql

        sql.reindex()

    def cleanup(self):
        """Release resources at end of request/process (SQL connection)."""
        if self.is_using_postgresql():
            from . import sql

            sql.cleanup_connection()

    @contextmanager
    def complex_data(self):
        """Context manager enabling the complex-data cache for its duration.

        The cache is only activated when the 'complex-data' site option is
        set; it is always reset to None on exit.
        """
        if self.has_site_option('complex-data'):
            self.complex_data_cache = {}
        try:
            yield True
        finally:
            self.complex_data_cache = None

    def cache_complex_data(self, value, rendered_value):
        # Keep a temporary cache of associations between a complex data value
        # (value) and a string representation (produced by django with
        # django.template.base.render_value_in_context.
        #
        # It ensures string values are unique by appending a private unicode
        # code point, that will be removed in wcs/qommon/template.py.

        if self.complex_data_cache is None:
            # it doesn't do anything unless initialized.
            return value

        # one code point from the Unicode private use area (U+E000...) per
        # cached entry makes each rendered string a unique cache key
        str_value = rendered_value + chr(0xE000 + len(self.complex_data_cache))
        self.complex_data_cache[str_value] = value
        return str_value

    def has_cached_complex_data(self, value):
        """Return True when *value* is a key in the (active) complex-data cache."""
        return bool(value in (self.complex_data_cache or {}))

    def get_cached_complex_data(self, value):
        """Return the original complex value cached for rendered string *value*.

        Non-string values and cache misses are returned as-is (stripped of
        private-use-area marker code points on a miss); lazy values are
        resolved through their get_value() method.
        """
        if not isinstance(value, str):
            return value
        if self.complex_data_cache is None:
            return value
        if value not in self.complex_data_cache:
            # not cached: just drop any private-use-area markers
            return re.sub(r'[\uE000-\uF8FF]', '', value)
        value_ = self.complex_data_cache.get(value)
        if hasattr(value_, 'get_value'):
            # unlazy variable
            return value_.get_value()
        return value_
|
|
|
|
|
|
# Install WcsPublisher as the active publisher implementation and register the
# package-level 'extra' directory for extension modules.
set_publisher_class(WcsPublisher)
WcsPublisher.register_extra_dir(os.path.join(os.path.dirname(__file__), 'extra'))