304 lines
10 KiB
Python
304 lines
10 KiB
Python
# w.c.s. - web application for online forms
|
|
# Copyright (C) 2005-2010 Entr'ouvert
|
|
#
|
|
# This program is free software; you can redistribute it and/or modify
|
|
# it under the terms of the GNU General Public License as published by
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
# (at your option) any later version.
|
|
#
|
|
# This program is distributed in the hope that it will be useful,
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
# GNU General Public License for more details.
|
|
#
|
|
# You should have received a copy of the GNU General Public License
|
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
|
|
|
import cPickle
|
|
import json
|
|
import os
|
|
import random
|
|
import sys
|
|
import zipfile
|
|
|
|
from Defaults import *
|
|
|
|
try:
|
|
from wcs_cfg import *
|
|
except ImportError:
|
|
pass
|
|
|
|
from qommon.publisher import set_publisher_class, QommonPublisher
|
|
|
|
# this is terribly ugly but import RootDirectory will import a bunch of things,
|
|
# and some of them need a publisher to be set
|
|
class StubWcsPublisher(QommonPublisher):
    # Minimal placeholder publisher: the imports just below (root,
    # backoffice, admin, ...) need a publisher class to be registered
    # before they can be imported.  The real publisher, WcsPublisher,
    # is registered at the end of this module.
    pass

set_publisher_class(StubWcsPublisher)
|
|
|
|
|
|
from root import RootDirectory
|
|
from backoffice import RootDirectory as BackofficeRootDirectory
|
|
from admin import RootDirectory as AdminRootDirectory
|
|
import sessions
|
|
from qommon.cron import CronJob
|
|
|
|
from users import User
|
|
from tracking_code import TrackingCode
|
|
from logged_errors import LoggedError
|
|
|
|
import pickle
|
|
|
|
class UnpicklerClass(pickle.Unpickler):
    """Unpickler that resolves classes at their current module location.

    Old pickles may reference form classes under the 'wcs.form' module;
    those classes now live in 'qommon.form', so the module path is
    rewritten before the class is looked up.
    """

    def find_class(self, module, name):
        # remap the legacy module path to its current location
        module_name = 'qommon.form' if module == 'wcs.form' else module
        __import__(module_name)
        return getattr(sys.modules[module_name], name)
|
|
|
|
|
|
class WcsPublisher(StubWcsPublisher):
    """Concrete w.c.s. publisher.

    Ties together the frontoffice, backoffice and admin URL directories,
    session/user/tracking-code storage, cron jobs, and the optional
    PostgreSQL backend.
    """

    APP_NAME = 'wcs'
    # defaults imported from Defaults (possibly overridden by wcs_cfg);
    # configure() below may override them again from a config file
    APP_DIR = APP_DIR
    DATA_DIR = DATA_DIR
    ERROR_LOG = ERROR_LOG
    USE_LONG_TRACES = USE_LONG_TRACES
    auto_create_appdir = AUTO_CREATE_VHOSTS
    missing_appdir_redirect = REDIRECT_ON_UNKNOWN_VHOST

    supported_languages = ['fr', 'es']

    root_directory_class = RootDirectory
    backoffice_directory_class = BackofficeRootDirectory
    admin_directory_class = AdminRootDirectory

    session_manager_class = sessions.StorageSessionManager
    user_class = User
    tracking_code_class = TrackingCode
    # custom unpickler remapping legacy 'wcs.form' module paths
    unpickler_class = UnpicklerClass

    @classmethod
    def get_backoffice_module(cls):
        """Return the backoffice module (imported lazily)."""
        import backoffice
        return backoffice

    @classmethod
    def get_admin_module(cls):
        """Return the admin module (imported lazily)."""
        import admin
        return admin

    @classmethod
    def configure(cls, config):
        """Apply settings from a ConfigParser-style object.

        Reads the optional [extra] section (extra directories to
        register) and, from [main], overrides for application paths,
        error logging, and virtual-host handling.
        """
        if config.has_section('extra'):
            # values are directories; the option name itself is unused
            for name, directory in config.items('extra'):
                cls.register_extra_dir(directory)
        if config.has_option("main", "app_dir"):
            cls.APP_DIR = config.get("main", "app_dir")
        if config.has_option("main", "data_dir"):
            cls.DATA_DIR = config.get("main", "data_dir")
        if config.has_option("main", "error_log"):
            cls.ERROR_LOG = config.get("main", "error_log")
        if config.has_option("main", "use_long_traces"):
            cls.USE_LONG_TRACES = config.getboolean("main",
                    "use_long_traces")
        if config.has_option("main", "auto_create_appdir"):
            cls.auto_create_appdir = config.getboolean("main",
                    "auto_create_appdir")
        if config.has_option("main", "missing_appdir_redirect"):
            cls.missing_appdir_redirect = config.get("main",
                    "missing_appdir_redirect")

    @classmethod
    def register_cronjobs(cls):
        """Register this publisher's cron jobs on top of the base ones."""
        super(WcsPublisher, cls).register_cronjobs()
        # every hour: check for global action timeouts
        # (minute picked at random to spread load across instances)
        cls.register_cronjob(CronJob(cls.apply_global_action_timeouts,
            minutes=[random.randint(0, 59)]))

    def is_using_postgresql(self):
        """Return True if this site is configured to use PostgreSQL.

        Requires both the 'postgresql' site option and a non-empty
        'postgresql' section in the site configuration.
        """
        return bool(self.has_site_option('postgresql') and self.cfg.get('postgresql', {}))

    def set_config(self, request=None, skip_sql=False):
        """Load site configuration and initialise derived settings.

        Sets the site charset (writing it for new sites), normalises
        admin permissions, loads extra workflow classes and, unless
        skip_sql is set, switches user/tracking-code storage to SQL
        when PostgreSQL is configured.
        """
        QommonPublisher.set_config(self, request = request)
        filename = os.path.join(self.app_dir, 'config.pck')
        if os.path.exists(filename):
            # a configuration file exists, it may not have a charset
            # specified so we default to iso-8859-15 as it was the only
            # possible value
            self.site_charset = self.cfg.get('misc', {}).get('charset', 'iso-8859-15')
        else:
            # new site, set the charset, and write it down for further
            # requests
            self.site_charset = 'utf-8'
            self.cfg['misc'] = {'charset': 'utf-8'}
            self.write_cfg()
        if request:
            request.response.charset = self.site_charset

        # make sure permissions are set using strings
        if self.cfg.get('admin-permissions'):
            for key in self.cfg['admin-permissions'].keys():
                if not self.cfg['admin-permissions'][key]:
                    continue
                self.cfg['admin-permissions'][key] = [str(x) for x in self.cfg['admin-permissions'][key]]

        # load site-specific extra workflow classes
        import wcs.workflows
        wcs.workflows.load_extra()

        if self.is_using_postgresql() and not skip_sql:
            import sql
            # swap file-based storage classes for their SQL equivalents
            self.user_class = sql.SqlUser
            self.tracking_code_class = sql.TrackingCode
            sql.get_connection(new=True)
        else:
            self.user_class = User
            self.tracking_code_class = TrackingCode

    def import_zip(self, fd):
        """Import site contents from a zip file object.

        Extracts settings (config.pck / config.json) and object files
        (formdefs, workflows, categories, roles, datasources, wscalls)
        into the application directory, then rebuilds indexes for the
        imported object types.

        Returns a dict mapping content category to the number of
        imported items ('settings' is 1 if settings were imported).
        """
        z = zipfile.ZipFile(fd)
        results = {'formdefs': 0, 'workflows': 0, 'categories': 0, 'roles': 0,
                'settings': 0, 'datasources': 0, 'wscalls': 0}

        def _decode_list(data):
            # recursively encode unicode items to the site charset
            rv = []
            for item in data:
                if isinstance(item, unicode):
                    item = item.encode(self.site_charset)
                elif isinstance(item, list):
                    item = _decode_list(item)
                elif isinstance(item, dict):
                    item = _decode_dict(item)
                rv.append(item)
            return rv

        def _decode_dict(data):
            # recursively encode unicode keys/values to the site charset;
            # used as json object_hook so the config matches the byte
            # strings used by the rest of the (python 2) code base
            rv = {}
            for key, value in data.iteritems():
                if isinstance(key, unicode):
                    key = key.encode(self.site_charset)
                if isinstance(value, unicode):
                    value = value.encode(self.site_charset)
                elif isinstance(value, list):
                    value = _decode_list(value)
                elif isinstance(value, dict):
                    value = _decode_dict(value)
                rv[key] = value
            return rv

        for f in z.namelist():
            if '.indexes' in f:
                continue
            path = os.path.join(self.app_dir, f)
            if not os.path.exists(os.path.dirname(path)):
                os.mkdir(os.path.dirname(path))
            if not os.path.basename(f):
                # skip directories
                continue
            data = z.read(f)
            if f in ('config.pck', 'config.json'):
                results['settings'] = 1
                if f == 'config.pck':
                    # NOTE(review): unpickling archive content; only
                    # import zips from trusted sources
                    d = cPickle.loads(data)
                else:
                    d = json.loads(data, object_hook=_decode_dict)
                # preserve the current service-provider ('sp') settings
                # over the imported ones
                if self.cfg.has_key('sp'):
                    current_sp = self.cfg['sp']
                else:
                    current_sp = None
                self.cfg = d
                if current_sp:
                    self.cfg['sp'] = current_sp
                elif self.cfg.has_key('sp'):
                    del self.cfg['sp']
                self.write_cfg()
                continue
            # NOTE(review): file handle is not closed explicitly
            open(path, 'w').write(data)
            # first path component is the content category (formdefs, ...)
            if results.has_key(os.path.split(f)[0]):
                results[os.path.split(f)[0]] += 1

        # rebuild indexes for imported objects
        for k, v in results.items():
            if k == 'settings':
                continue
            if v == 0:
                continue
            klass = None
            if k == 'formdefs':
                from formdef import FormDef
                klass = FormDef
            elif k == 'categories':
                from categories import Category
                klass = Category
            elif k == 'roles':
                from roles import Role
                klass = Role
            elif k == 'workflows':
                from wcs.workflows import Workflow
                klass = Workflow
            if klass:
                klass.rebuild_indexes()

            if k == 'formdefs':
                # in case of formdefs, we store them anew in case SQL changes
                # are required.
                for formdef in FormDef.select():
                    formdef.store()

        z.close()
        return results

    def init_publish(self, request):
        """Hook request publication to honour the iframe-mode header."""
        if request.get_header('X_WCS_IFRAME_MODE', '') in ('true', 'yes'):
            request.response.iframe_mode = True
        return QommonPublisher.init_publish(self, request)

    def get_object_visitors(self, object_key):
        """Return visitors of the given object, as reported by sessions."""
        session_manager = self.session_manager_class()
        return session_manager.session_class.get_object_visitors(object_key)

    def get_visited_objects(self, exclude_user=None):
        """Return objects currently visited, optionally excluding a user."""
        session_manager = self.session_manager_class()
        return session_manager.session_class.get_visited_objects(
                exclude_user=exclude_user)

    def initialize_sql(self):
        """Create/refresh all SQL tables and views for this site."""
        import sql
        sql.get_connection(new=True)
        sql.do_user_table()
        sql.do_tracking_code_table()
        sql.do_meta_table()
        from formdef import FormDef
        conn, cur = sql.get_connection_and_cursor()
        # views are dropped first, then recreated per formdef
        sql.drop_views(None, conn, cur)
        for formdef in FormDef.select():
            sql.do_formdef_tables(formdef)
        sql.migrate_global_views(conn, cur)
        conn.commit()
        cur.close()

    def log_internal_error(self, error_summary, plain_error_msg, record=False):
        """Log an internal error; also persist it when record is True."""
        super(WcsPublisher, self).log_internal_error(error_summary,
                plain_error_msg, record=record)
        if record:
            LoggedError.record(error_summary, plain_error_msg, publisher=self)

    def apply_global_action_timeouts(self):
        """Cron entry point: evaluate global action timeout triggers
        for every workflow."""
        from wcs.workflows import Workflow, WorkflowGlobalActionTimeoutTrigger
        for workflow in Workflow.select():
            WorkflowGlobalActionTimeoutTrigger.apply(workflow)

    def migrate_sql(self):
        """Run pending SQL schema migrations."""
        import sql
        sql.migrate()

    def cleanup(self):
        """Release per-request resources (the SQL connection, if any)."""
        if self.is_using_postgresql():
            import sql
            sql.cleanup_connection()
|
|
|
|
# replace the stub registered above with the real publisher class
set_publisher_class(WcsPublisher)
# the 'extra' directory shipped alongside this module is always registered
WcsPublisher.register_extra_dir(os.path.join(os.path.dirname(__file__), 'extra'))
|
|
|