2010-04-21 10:50:16 +02:00
|
|
|
# w.c.s. - web application for online forms
|
|
|
|
# Copyright (C) 2005-2010 Entr'ouvert
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2012-01-26 17:32:10 +01:00
|
|
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
2010-04-21 10:50:16 +02:00
|
|
|
|
2015-07-11 11:44:03 +02:00
|
|
|
import errno
|
2014-11-13 21:05:05 +01:00
|
|
|
import operator
|
2005-05-19 23:26:55 +02:00
|
|
|
import os
|
2009-04-14 09:25:01 +02:00
|
|
|
import time
|
2007-03-15 15:36:50 +01:00
|
|
|
import pickle
|
2009-08-31 14:45:17 +02:00
|
|
|
import os.path
|
2010-10-14 14:17:19 +02:00
|
|
|
import shutil
|
2013-04-15 15:23:30 +02:00
|
|
|
import sys
|
2009-09-21 16:11:57 +02:00
|
|
|
import tempfile
|
2009-08-31 14:45:17 +02:00
|
|
|
|
2019-11-14 15:06:26 +01:00
|
|
|
from django.utils import six
|
2019-11-17 10:29:35 +01:00
|
|
|
from django.utils.encoding import force_bytes
|
2019-09-29 20:53:23 +02:00
|
|
|
from django.utils.six.moves import _thread
|
|
|
|
|
|
|
|
from .vendor import locket
|
2005-05-19 23:26:55 +02:00
|
|
|
|
2005-05-24 20:03:39 +02:00
|
|
|
from quixote import get_publisher
|
2019-11-15 14:19:26 +01:00
|
|
|
from . import PICKLE_KWARGS
|
2005-05-24 20:03:39 +02:00
|
|
|
|
2015-04-16 11:32:10 +02:00
|
|
|
|
|
|
|
def cache_umask():
    """Cache the process umask in the module-level `process_umask` variable.

    os.umask() can only be read by setting it, so set it to 0 and
    immediately restore the original value.
    """
    global process_umask
    process_umask = os.umask(0)
    os.umask(process_umask)


# cache umask when loading up the module
cache_umask()
|
|
|
|
|
2014-11-30 22:05:46 +01:00
|
|
|
def _take(objects, limit, offset=0):
|
|
|
|
for y in objects:
|
|
|
|
if offset:
|
|
|
|
offset -= 1
|
|
|
|
continue
|
|
|
|
if limit:
|
|
|
|
limit -= 1
|
|
|
|
elif limit == 0:
|
2009-11-21 15:13:33 +01:00
|
|
|
break
|
2014-11-30 22:05:46 +01:00
|
|
|
elif limit is None:
|
|
|
|
pass
|
2009-11-21 15:13:33 +01:00
|
|
|
yield y
|
|
|
|
|
2005-09-05 21:17:22 +02:00
|
|
|
def lax_int(value):
    """Return value as an integer, or -1 when it is not a valid integer
    literal."""
    try:
        result = int(value)
    except ValueError:
        result = -1
    return result
|
2005-07-15 20:05:40 +02:00
|
|
|
|
2005-09-05 21:17:22 +02:00
|
|
|
def fix_key(k):
    """Make a key usable as a filesystem name (no slashes)."""
    if not k:
        # keep falsy keys (None, '', 0) untouched
        return k
    return '-'.join(str(k).split('/'))
|
|
|
|
|
2019-02-28 15:09:23 +01:00
|
|
|
def atomic_write(path, content, async_op=False):
    '''Rewrite a complete file automatically, that is write to new file with
    temporary name, fsync, then rename to final name. Use threads to remove blocking.'''
    def doit():
        # create the temporary file in the same directory so the final
        # rename() stays on the same filesystem and is atomic
        dirname = os.path.dirname(path)
        fd, temp = tempfile.mkstemp(dir=dirname,
                prefix='.tmp-'+os.path.basename(path)+'-')
        # mkstemp creates files 0600; reapply the cached process umask to
        # 0666 so the file gets the permissions a plain open() would give
        os.fchmod(fd, 0o666 & ~process_umask)
        f = os.fdopen(fd, "wb")
        if hasattr(content, 'read'):
            # file pointer: copy it into the temporary file in 100kB chunks
            def read100k():
                return content.read(100000)
            for piece in iter(read100k, b''):
                f.write(piece)
        else:
            # assumed to be a bytes string -- TODO confirm callers never
            # pass text
            f.write(content)
        f.flush()
        # make sure data is on disk before the rename makes it visible
        os.fsync(f.fileno())
        f.close()
        os.rename(temp, path)
    if async_op:
        # fire-and-forget: write errors will not be reported to the caller
        _thread.start_new_thread(doit, ())
    else:
        doit()
|
2005-05-19 23:26:55 +02:00
|
|
|
|
2014-11-13 21:05:05 +01:00
|
|
|
|
|
|
|
class Criteria(object):
    """Base class for filtering criterias: attribute <op> value."""

    def __init__(self, attribute, value):
        self.attribute = attribute
        self.value = value
        # Python 3 refuses comparisons between disparate types; pick a
        # "null" value of the same type as self.value so objects with a
        # missing/None attribute can still be compared and sorted.
        if isinstance(value, bool):
            self.typed_none = False
        elif isinstance(value, six.integer_types + (float,)):
            self.typed_none = -sys.maxsize
        elif isinstance(value, time.struct_time):
            self.typed_none = time.gmtime(-10**10)  # 1653
        else:
            self.typed_none = ''

    def build_lambda(self):
        def check(x):
            return self.op(getattr(x, self.attribute, None) or self.typed_none, self.value)
        return check
|
2014-11-13 21:05:05 +01:00
|
|
|
|
|
|
|
|
|
|
|
class Less(Criteria):
    # matches when attribute < value
    op = operator.lt
|
|
|
|
|
|
|
|
class Greater(Criteria):
    # matches when attribute > value
    op = operator.gt
|
|
|
|
|
|
|
|
class Equal(Criteria):
    # matches when attribute == value
    op = operator.eq
|
|
|
|
|
2014-11-22 11:30:49 +01:00
|
|
|
class NotEqual(Criteria):
    # matches when attribute != value
    op = operator.ne
|
|
|
|
|
2014-11-13 21:05:05 +01:00
|
|
|
class LessOrEqual(Criteria):
    # matches when attribute <= value
    op = operator.le
|
|
|
|
|
|
|
|
class GreaterOrEqual(Criteria):
    # matches when attribute >= value
    op = operator.ge
|
|
|
|
|
2014-11-22 13:13:38 +01:00
|
|
|
class Contains(Criteria):
    """Matches when the attribute value is one of the values in
    self.value (note the operand order: value is the container)."""
    op = operator.contains

    def build_lambda(self):
        def check(x):
            return self.op(self.value, getattr(x, self.attribute, ''))
        return check
|
2014-11-22 13:13:38 +01:00
|
|
|
|
|
|
|
class NotContains(Criteria):
    """Matches when the attribute value is not among the values in
    self.value (value is the container)."""
    op = operator.contains

    def build_lambda(self):
        def check(x):
            return not self.op(self.value, getattr(x, self.attribute, ''))
        return check
|
2014-11-22 13:13:38 +01:00
|
|
|
|
2014-12-01 18:44:55 +01:00
|
|
|
class Intersects(Criteria):
    """Matches when the attribute (a sequence) shares at least one
    element with self.value."""

    def build_lambda(self):
        wanted = set(self.value)
        def func(x):
            try:
                current = set(getattr(x, self.attribute, []) or [])
            except KeyError:
                # this may happen if used to check a formdata field that
                # didn't exist when the formdata was created.
                return False
            return wanted.intersection(current)
        return func
|
2014-11-13 21:05:05 +01:00
|
|
|
|
2014-12-01 23:20:46 +01:00
|
|
|
class Or(Criteria):
    """Combine criterias; matches when any of them matches."""

    def __init__(self, criterias, **kwargs):
        self.criterias = criterias

    def build_lambda(self):
        def combine(f1, f2):
            return lambda x: f1(x) or f2(x)
        combined = lambda x: False
        for criteria in self.criterias:
            combined = combine(combined, criteria.build_lambda())
        return combined
|
|
|
|
|
|
|
|
class And(Criteria):
    """Combine criterias; matches when all of them match."""

    def __init__(self, criterias, **kwargs):
        self.criterias = criterias

    def build_lambda(self):
        def combine(f1, f2):
            return lambda x: f1(x) and f2(x)
        combined = lambda x: True
        for criteria in self.criterias:
            combined = combine(combined, criteria.build_lambda())
        return combined
|
|
|
|
|
2014-12-22 19:57:54 +01:00
|
|
|
class ILike(Criteria):
    """Case-insensitive substring match on a string attribute."""

    def build_lambda(self):
        def match(x):
            haystack = getattr(x, self.attribute, '') or ''
            return self.value.lower() in haystack.lower()
        return match
|
2014-12-22 19:57:54 +01:00
|
|
|
|
2015-11-13 14:56:34 +01:00
|
|
|
class FtsMatch(Criteria):
    # full text search criteria; not supported by this pickle backend
    # (build_lambda raises), presumably handled by the sql backend --
    # TODO confirm
    def __init__(self, value):
        # no attribute: full text search applies to the whole object
        self.value = value

    def build_lambda(self):
        raise NotImplementedError()
|
|
|
|
|
2016-04-13 13:50:49 +02:00
|
|
|
class NotNull(Criteria):
    """Matches when the attribute exists and is not None."""

    def __init__(self, attribute):
        self.attribute = attribute

    def build_lambda(self):
        def check(x):
            return getattr(x, self.attribute, None) is not None
        return check
|
|
|
|
|
|
|
|
class Null(Criteria):
    """Matches when the attribute is missing or None."""

    def __init__(self, attribute):
        self.attribute = attribute

    def build_lambda(self):
        def check(x):
            return getattr(x, self.attribute, None) is None
        return check
|
|
|
|
|
|
|
|
|
2014-11-13 21:05:05 +01:00
|
|
|
def parse_clause(clause):
    """Create a single callable out of a clause, i.e. a list of criterias
    (attribute, operator, value) and/or callables, ANDed together."""
    if callable(clause):
        # already a callable
        return clause

    combined = lambda x: True
    for element in clause:
        element_func = element if callable(element) else element.build_lambda()
        # bind current functions via default-argument-free closure factory
        combined = (lambda f1, f2: lambda x: f1(x) and f2(x))(combined, element_func)
    return combined
|
|
|
|
|
|
|
|
|
2010-08-23 15:35:59 +02:00
|
|
|
class StorageIndexException(Exception):
    # raised when an index directory cannot be created or used
    pass
|
|
|
|
|
2005-09-05 21:17:22 +02:00
|
|
|
class StorableObject(object):
    # attribute names for which per-value symlink indexes
    # (<objects_dir>-<index>/) are maintained
    _indexes = None
    # attribute names for which pickled lists of ids
    # (<objects_dir>/.indexes/<index>/) are maintained
    _hashed_indexes = None
    _filename = None # None, unless must be saved to a specific location
|
2006-08-11 15:15:02 +02:00
|
|
|
|
|
|
|
def __init__(self, id = None):
|
|
|
|
self.id = id
|
2005-07-15 20:05:40 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
    @classmethod
    def get_table_name(cls):
        # cls._names is expected to be defined on concrete subclasses
        # (not visible in this module)
        return cls._names
|
2005-05-19 23:26:55 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
    @classmethod
    def get_objects_dir(cls):
        # directory holding one pickle file per object of this class
        return os.path.join(get_publisher().app_dir, cls.get_table_name())
|
2005-05-19 23:26:55 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2005-09-05 21:17:22 +02:00
|
|
|
def keys(cls):
|
|
|
|
if not os.path.exists(cls.get_objects_dir()):
|
|
|
|
return []
|
2006-11-13 13:55:04 +01:00
|
|
|
return [fix_key(x) for x in os.listdir(cls.get_objects_dir()) if x[0] != '.']
|
2005-09-15 18:47:53 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2013-05-23 18:10:41 +02:00
|
|
|
def values(cls, ignore_errors=False, ignore_migration=True):
|
|
|
|
values = [cls.get(x, ignore_errors=ignore_errors, ignore_migration=True) for x in cls.keys()]
|
2007-02-15 14:22:28 +01:00
|
|
|
return [x for x in values if x is not None]
|
2005-09-15 18:47:53 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2005-09-15 18:47:53 +02:00
|
|
|
def items(cls):
|
|
|
|
return [(x, cls.get(x)) for x in cls.keys()]
|
2009-01-15 10:50:37 +01:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2014-12-01 18:50:27 +01:00
|
|
|
def count(cls, clause=None):
|
|
|
|
if clause:
|
|
|
|
return len(cls.select(clause))
|
2005-09-05 21:17:22 +02:00
|
|
|
return len(cls.keys())
|
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
    @classmethod
    def select(cls, clause=None, order_by=None, ignore_errors=False,
            ignore_migration=False, limit=None, offset=None, iterator=False, **kwargs):
        """Load all objects, filtered, sorted and sliced.

        clause -- list of criterias and/or callables, ANDed together
        order_by -- attribute name to sort on, '-' prefix for descending
        ignore_errors -- drop objects that fail to load
        limit/offset -- slice applied after filtering and sorting
        """
        # iterator: only for compatibility with sql select()
        keys = cls.keys()
        # load objects lazily, one pickle file per key
        objects = (cls.get(k, ignore_errors=ignore_errors,
                ignore_migration=ignore_migration, **kwargs) for k in keys)
        if ignore_errors:
            objects = (x for x in objects if x is not None)
        if clause:
            clause_function = parse_clause(clause)
            objects = (x for x in objects if clause_function(x))
        if order_by:
            order_by = str(order_by)
            if order_by[0] == '-':
                reverse = True
                order_by = order_by[1:]
            else:
                reverse = False
            # only list can be sorted
            objects = list(objects)
            if order_by == 'id':
                # ids are strings but usually numeric; sort numerically
                key_function = lambda x: lax_int(x.id)
            elif order_by == 'name':
                # proper collation should be done but it's messy to get working
                # on all systems so we go the cheap and almost ok way.
                from .misc import simplify
                key_function = lambda x: simplify(x.name)
            elif order_by.endswith('_time'):
                # substitute a very old date for None so comparison works
                typed_none = time.gmtime(-10**10) # 1653
                key_function = lambda x: getattr(x, order_by) or typed_none
            else:
                key_function = lambda x: getattr(x, order_by)
            objects.sort(key=key_function)
            if reverse:
                objects.reverse()
        if limit or offset:
            objects = _take(objects, limit, offset)
        return list(objects)
|
2009-01-15 10:50:37 +01:00
|
|
|
|
2019-04-12 15:21:29 +02:00
|
|
|
@classmethod
|
|
|
|
def select_iterator(cls, **kwargs):
|
|
|
|
for obj in cls.select(**kwargs):
|
|
|
|
yield obj
|
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2005-09-05 21:17:22 +02:00
|
|
|
def has_key(cls, id):
|
2012-03-09 17:48:57 +01:00
|
|
|
filename = os.path.join(cls.get_objects_dir(), fix_key(id))
|
2019-11-17 10:29:35 +01:00
|
|
|
return os.path.exists(force_bytes(filename, 'utf-8'))
|
2005-09-05 21:17:22 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
    @classmethod
    def get_new_id(cls, create=False):
        """Compute a new (string) object id; with create=True also reserve
        it by creating an empty file, retrying on collision."""
        keys = cls.keys()
        if not keys:
            id = 1
        else:
            # one past the highest numeric id
            id = max([lax_int(x) for x in keys]) + 1
            if id == 0:
                # no numeric ids at all (lax_int gave -1 for every key)
                id = len(keys)+1
        if create:
            objects_dir = cls.get_objects_dir()
            object_filename = os.path.join(objects_dir, fix_key(id))
            try:
                # O_EXCL makes the reservation atomic across processes
                fd = os.open(object_filename, os.O_CREAT | os.O_EXCL)
            except OSError:
                # id got taken concurrently; try again
                return cls.get_new_id(create=True)
            os.close(fd)
        return str(id)
|
2005-09-05 21:17:22 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2018-04-09 12:21:44 +02:00
|
|
|
def get(cls, id, ignore_errors=False, ignore_migration=False, **kwargs):
|
2006-06-07 09:32:49 +02:00
|
|
|
if id is None:
|
2007-02-15 14:22:28 +01:00
|
|
|
if ignore_errors:
|
|
|
|
return None
|
|
|
|
else:
|
|
|
|
raise KeyError()
|
2006-08-11 15:15:02 +02:00
|
|
|
filename = os.path.join(cls.get_objects_dir(), fix_key(id))
|
2010-01-11 14:13:57 +01:00
|
|
|
return cls.get_filename(filename, ignore_errors=ignore_errors,
|
2018-04-09 12:21:44 +02:00
|
|
|
ignore_migration=ignore_migration,
|
|
|
|
**kwargs)
|
2006-08-11 15:15:02 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2014-06-03 15:54:16 +02:00
|
|
|
def get_ids(cls, ids, ignore_errors=False, keep_order=False):
|
2013-03-30 11:37:58 +01:00
|
|
|
objects = []
|
|
|
|
for x in ids:
|
|
|
|
obj = cls.get(x, ignore_errors=ignore_errors)
|
|
|
|
if obj is not None:
|
|
|
|
objects.append(obj)
|
|
|
|
return objects
|
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2012-05-30 16:27:34 +02:00
|
|
|
def get_on_index(cls, id, index, ignore_errors=False, ignore_migration=False):
|
2007-02-16 10:44:56 +01:00
|
|
|
if not cls._indexes:
|
|
|
|
raise KeyError()
|
2006-08-11 15:15:02 +02:00
|
|
|
objects_dir = cls.get_objects_dir()
|
|
|
|
index_dir = objects_dir + '-' + index
|
2007-02-16 10:44:56 +01:00
|
|
|
if not os.path.exists(index_dir):
|
|
|
|
cls.rebuild_indexes()
|
2010-04-19 13:34:36 +02:00
|
|
|
filename = os.path.join(index_dir, str(fix_key(id)))
|
2012-05-30 16:27:34 +02:00
|
|
|
return cls.get_filename(filename, ignore_errors=ignore_errors,
|
|
|
|
ignore_migration=ignore_migration)
|
2006-08-11 15:15:02 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2011-01-31 13:34:13 +01:00
|
|
|
def get_ids_with_indexed_value(cls, index, value, auto_fallback=True):
|
2008-04-16 00:13:52 +02:00
|
|
|
objects_dir = cls.get_objects_dir()
|
2011-01-31 13:34:13 +01:00
|
|
|
index_dir = os.path.join(objects_dir, '.indexes', str(index))
|
2010-04-19 13:34:36 +02:00
|
|
|
index_file = os.path.join(index_dir, '%s-%s' % (index, fix_key(value)))
|
2008-04-16 00:13:52 +02:00
|
|
|
if not os.path.exists(index_dir):
|
2011-01-31 13:34:13 +01:00
|
|
|
if auto_fallback is False:
|
|
|
|
raise StorageIndexException()
|
2010-08-23 15:35:59 +02:00
|
|
|
try:
|
|
|
|
cls.rebuild_indexes()
|
|
|
|
except StorageIndexException:
|
|
|
|
values = cls.select(ignore_errors=True)
|
|
|
|
return [x for x in values if getattr(x, index) == value]
|
2008-04-16 00:13:52 +02:00
|
|
|
if not os.path.exists(index_file):
|
|
|
|
return []
|
2019-11-12 14:56:27 +01:00
|
|
|
return pickle.load(open(index_file, 'rb'))
|
2010-08-14 10:12:31 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2010-08-14 10:12:31 +02:00
|
|
|
def get_with_indexed_value(cls, index, value, ignore_errors = False):
|
2010-08-16 08:42:18 +02:00
|
|
|
ids = cls.get_ids_with_indexed_value(str(index), str(value))
|
2018-04-16 16:09:17 +02:00
|
|
|
for x in ids:
|
|
|
|
obj = cls.get(x, ignore_errors=ignore_errors)
|
|
|
|
if obj is not None:
|
|
|
|
yield obj
|
2008-04-16 00:13:52 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2014-11-24 16:14:58 +01:00
|
|
|
def storage_load(cls, fd):
|
2007-06-06 23:18:38 +02:00
|
|
|
if get_publisher() and get_publisher().unpickler_class:
|
|
|
|
unpickler = get_publisher().unpickler_class
|
|
|
|
else:
|
2007-06-07 15:13:54 +02:00
|
|
|
unpickler = pickle.Unpickler
|
2019-11-15 14:19:26 +01:00
|
|
|
return unpickler(fd, **PICKLE_KWARGS).load()
|
2014-11-24 16:14:58 +01:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
    @classmethod
    def get_filename(cls, filename, ignore_errors=False, ignore_migration=False, **kwargs):
        """Load an object from the given pickle file.

        Raises KeyError (or returns None with ignore_errors) when the file
        is missing, refers to a missing module, or stays truncated.
        """
        try:
            fd = open(force_bytes(filename, 'utf-8'), 'rb')
            o = cls.storage_load(fd, **kwargs)
        except IOError:
            if ignore_errors:
                return None
            raise KeyError()
        except ImportError as e:
            # pickled object references a module that no longer exists
            if ignore_errors:
                return None
            raise KeyError()
        except EOFError as e:
            # maybe it's being written to, loop for a while to see
            current_position = fd.tell()
            fd.close()
            for i in range(10):
                time.sleep(0.01)
                if current_position != os.stat(filename).st_size:
                    # file size changed: writing finished (or progressed),
                    # retry the load from scratch
                    return cls.get_filename(filename, ignore_errors=ignore_errors,
                            ignore_migration=ignore_migration)
            if ignore_errors:
                return None
            raise KeyError()
        # the pickle may hold an older class; force the current one
        o.__class__ = cls
        if not ignore_migration:
            o.id = str(o.id) # makes sure 'id' is a string
            if hasattr(cls, 'migrate'):
                o.migrate()
        return o
|
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
    @classmethod
    def rebuild_indexes(cls, indexes=[]):
        """Rebuild symlink indexes (_indexes) and hashed indexes
        (_hashed_indexes) from scratch; `indexes` optionally restricts
        the rebuild to a subset of index names."""
        if not (cls._indexes or cls._hashed_indexes):
            return

        if not indexes:
            indexes = (cls._hashed_indexes or []) + (cls._indexes or [])

        objects_dir = cls.get_objects_dir()

        # maps '<index>-<value>' -> list of object ids
        hashed_indexes = {}

        # make sure hashed index directories exist
        for index in cls._hashed_indexes or []:
            if index not in indexes:
                continue
            index_dir = os.path.join(objects_dir, '.indexes', index)
            if not os.path.exists(index_dir):
                try:
                    os.makedirs(index_dir)
                except OSError:
                    raise StorageIndexException()

        for object in cls.values(ignore_errors=True, ignore_migration=True):
            object_filename = os.path.join(objects_dir, fix_key(object.id))
            relative_object_filename = os.path.join('..', cls.get_table_name(), fix_key(object.id))
            # symlink indexes: one <index_dir>/<value> link per object
            for index in cls._indexes or []:
                if index not in indexes:
                    continue
                if not hasattr(object, index) or getattr(object, index) is None:
                    continue
                index_dir = objects_dir + '-' + index
                link_name = os.path.join(index_dir, fix_key(str(getattr(object, index))))
                try:
                    if relative_object_filename:
                        os.symlink(relative_object_filename, link_name)
                    else:
                        os.symlink(object_filename, link_name)
                except OSError as exc:
                    if exc.errno == 2:
                        # index directory is missing; create it and retry
                        os.mkdir(index_dir)
                    elif exc.errno == 17:
                        # link already exists; replace it
                        try:
                            os.unlink(link_name)
                        except OSError as exc2:
                            if exc2.errno != 2: # no such file or directory
                                raise
                            # link got created in a different process, move
                            # along.
                            continue
                    else:
                        raise
                    if relative_object_filename:
                        os.symlink(relative_object_filename, link_name)
                    else:
                        os.symlink(object_filename, link_name)
            # hashed indexes: accumulate ids per (index, value) pair
            for index in cls._hashed_indexes or []:
                if index not in indexes:
                    continue
                if not hasattr(object, index) or getattr(object, index) is None:
                    continue
                attribute = getattr(object, index)
                if type(attribute) is dict:
                    attribute = attribute.values()
                elif type(attribute) not in (tuple, list, set):
                    attribute = [attribute]
                for attr in attribute:
                    attr_value = fix_key(attr)
                    index_name = '%s-%s' % (index, attr_value)
                    if not index_name in hashed_indexes:
                        hashed_indexes[index_name] = []
                    hashed_indexes[index_name].append(str(object.id))

        # write one pickled id list per (index, value) pair
        for index, content in hashed_indexes.items():
            index_key = index.split('-')[0]
            if index_key not in indexes:
                continue
            index_file = os.path.join(objects_dir, '.indexes', index_key, index)
            pickle.dump(content, open(index_file, 'wb'), protocol=2)

        # remove stale hashed index files for values no longer in use
        for index in cls._hashed_indexes or []:
            if index not in indexes:
                continue
            index_dir = os.path.join(objects_dir, '.indexes', index)
            for filename in os.listdir(index_dir):
                if filename not in hashed_indexes:
                    os.unlink(os.path.join(index_dir, filename))
|
|
|
|
|
2008-12-28 14:22:55 +01:00
|
|
|
def get_object_filename(self):
|
|
|
|
if self._filename:
|
|
|
|
if self._filename[0] == '/':
|
|
|
|
return self._filename
|
|
|
|
else:
|
|
|
|
return os.path.join(get_publisher().app_dir, self._filename)
|
|
|
|
else:
|
|
|
|
objects_dir = self.get_objects_dir()
|
|
|
|
return os.path.join(objects_dir, fix_key(self.id))
|
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2014-11-24 16:14:58 +01:00
|
|
|
def storage_dumps(cls, object):
|
2019-02-28 15:05:48 +01:00
|
|
|
return pickle.dumps(object, protocol=2)
|
2014-11-24 16:14:58 +01:00
|
|
|
|
2019-02-28 15:09:23 +01:00
|
|
|
    def store(self, async_op=False):
        """Serialize the object to disk and update its indexes; allocates
        an id for new objects."""
        objects_dir = self.get_objects_dir()
        new_object = False
        if self._filename:
            # object pinned to a specific location, absolute or relative
            # to the application directory
            if self._filename[0] == '/':
                object_filename = self._filename
                relative_object_filename = None
            else:
                object_filename = os.path.join(get_publisher().app_dir, self._filename)
                relative_object_filename = os.path.join('..', self._filename)
        else:
            if not os.path.exists(objects_dir):
                try:
                    os.mkdir(objects_dir)
                except OSError as error:
                    if error.errno != 17: # 17 == Directory exists
                        raise
            if self.id is None:
                # reserve a fresh id (creates an empty placeholder file)
                self.id = self.get_new_id(create=True)
                new_object = True
            object_filename = os.path.join(objects_dir, fix_key(self.id))
            relative_object_filename = os.path.join('..', self.get_table_name(), fix_key(self.id))

        # load the previous value so update_indexes() can remove stale
        # index entries; skipped for brand new objects
        previous_object_value = None
        if not new_object and (self._indexes or self._hashed_indexes):
            previous_object_value = self.get_filename(object_filename,
                    ignore_errors=True, ignore_migration=True)

        s = self.storage_dumps(self)
        atomic_write(object_filename, s, async_op)
        # update last modified time
        if os.path.exists(objects_dir):
            os.utime(objects_dir, None)

        # serialise index updates across processes
        with locket.lock_file(objects_dir + '.lock.index'):
            try:
                self.update_indexes(previous_object_value, relative_object_filename)
            except:
                # something failed, we can't keep using possibly broken indexes, so
                # we notify of the bug and remove the indexes
                get_publisher().notify_of_exception(sys.exc_info(), context='[STORAGE]')
                self.destroy_indexes()
|
2010-10-14 14:17:19 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
    @classmethod
    def destroy_indexes(cls):
        """Remove all index directories; each is first atomically renamed
        into a fresh '.trash-N' directory so concurrent readers never see
        a half-deleted index, then wiped."""
        objects_dir = cls.get_objects_dir()

        directories_to_trash = []
        directories_to_wipe = []

        for index in cls._indexes or []:
            index_dir = objects_dir + '-' + index
            directories_to_trash.append(index_dir)

        directories_to_trash.append(os.path.join(objects_dir, '.indexes'))

        for directory in directories_to_trash:
            if not os.path.exists(directory):
                continue
            i = 0
            while True:
                trashed_index_name = directory + '.trash-%s' % i
                i += 1
                try:
                    # mkdir is atomic; failure means the name is taken,
                    # try the next suffix
                    os.mkdir(trashed_index_name)
                except OSError:
                    continue
                try:
                    os.rename(directory, os.path.join(trashed_index_name, 'idx'))
                except OSError:
                    # directory vanished or got renamed concurrently
                    continue
                directories_to_wipe.append(trashed_index_name)
                break

        # actual removal happens last, out of the way of readers
        for directory in directories_to_wipe:
            shutil.rmtree(directory)
|
2010-10-14 14:17:19 +02:00
|
|
|
|
2010-10-21 14:27:47 +02:00
|
|
|
    def update_indexes(self, previous_object_value, relative_object_filename):
        """Bring this object's index entries up to date.

        previous_object_value -- the object as previously stored (or None),
            used to remove entries for old attribute values
        relative_object_filename -- relative path to use as symlink target
            (or None to use the absolute path)
        """
        objects_dir = self.get_objects_dir()
        rebuilt_indexes = False
        # symlink indexes: one <objects_dir>-<index>/<value> link
        for index in self._indexes or []:
            if not hasattr(self, index) or getattr(self, index) is None:
                continue
            index_dir = objects_dir + '-' + index

            link_name = os.path.join(index_dir, fix_key(str(getattr(self, index))))

            if previous_object_value:
                old_link_name = os.path.join(index_dir,
                        fix_key(str(getattr(previous_object_value, index))))
                if os.path.exists(old_link_name):
                    if old_link_name == link_name:
                        # value unchanged, nothing to do
                        continue
                    os.unlink(old_link_name)

            try:
                if relative_object_filename:
                    os.symlink(relative_object_filename, link_name)
                else:
                    os.symlink(self.get_object_filename(), link_name)
            except OSError as exc:
                if exc.errno == 2:
                    # index directory is missing
                    os.mkdir(index_dir)
                    if not rebuilt_indexes:
                        # perhaps index dir got removed; rebuild it before
                        # adding elements to it.
                        self.rebuild_indexes()
                        rebuilt_indexes = True
                elif exc.errno == 17:
                    # stale link with the same name; replace it
                    os.unlink(link_name)
                else:
                    raise
                if not rebuilt_indexes:
                    if relative_object_filename:
                        os.symlink(relative_object_filename, link_name)
                    else:
                        os.symlink(self.get_object_filename(), link_name)

        # hashed indexes: pickled lists of ids per (index, value) pair
        for index in self._hashed_indexes or []:
            index_dir = os.path.join(objects_dir, '.indexes', index)
            if not os.path.exists(index_dir):
                try:
                    os.makedirs(index_dir)
                except OSError as e:
                    if e.errno == errno.EEXIST: # File exists
                        pass

            # normalize current and previous attribute values to lists
            old_value = []
            if type(getattr(self, index)) is dict:
                new_value = getattr(self, index).values()
                if previous_object_value:
                    old_value = getattr(previous_object_value, index)
                    if old_value is None:
                        old_value = []
                    else:
                        old_value = old_value.values()
            elif type(getattr(self, index)) in (tuple, list, set):
                new_value = getattr(self, index)
                if previous_object_value:
                    old_value = getattr(previous_object_value, index)
                    if old_value is None:
                        old_value = []
            else:
                new_value = [getattr(self, index)]
                if previous_object_value:
                    old_raw_value = getattr(previous_object_value, index)
                    if type(old_raw_value) is dict:
                        old_value = old_raw_value.values()
                    elif type(old_raw_value) in (tuple, list, set):
                        old_value = old_raw_value
                    else:
                        old_value = [old_raw_value]

            # drop this object's id from entries for values it no longer has
            for oldv in old_value:
                if oldv in new_value:
                    continue
                old_index_name = '%s-%s' % (index, fix_key(oldv))
                old_index_file = os.path.join(index_dir, old_index_name)
                if os.path.exists(old_index_file):
                    ids = [str(x) for x in pickle.load(open(old_index_file, 'rb'))]
                    if str(self.id) in ids:
                        ids.remove(str(self.id))
                    pickle.dump(ids, open(old_index_file, 'wb'), protocol=2)

            # add this object's id to entries for its new values
            for newv in new_value:
                if newv in old_value:
                    continue
                index_name = '%s-%s' % (index, fix_key(newv))
                index_file = os.path.join(index_dir, index_name)
                if os.path.exists(index_file):
                    ids = [str(x) for x in pickle.load(open(index_file, 'rb'))]
                else:
                    ids = []
                if not str(self.id) in ids:
                    ids.append(str(self.id))
                pickle.dump(ids, open(index_file, 'wb'), protocol=2)
|
2008-04-16 00:13:52 +02:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
def volatile(cls):
    """Return a fresh, unsaved instance.

    The instance id is forced to None so a later store() cannot
    accidentally overwrite an existing object.
    """
    instance = cls()
    instance.id = None
    return instance
|
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
def remove_object(cls, id):
    """Delete the object with the given id from disk storage.

    Before unlinking the object file itself, scrub every index entry
    that points at it:
      * plain indexes: one symlink/file per value, named after the value;
      * hashed indexes: pickled lists of object ids kept under the
        '.indexes' directory.
    """
    objects_dir = cls.get_objects_dir()
    if cls._indexes or cls._hashed_indexes:
        # load the object so its indexed attribute values are known
        obj = cls.get(id)
        for index in cls._indexes or []:
            if not hasattr(obj, index) or getattr(obj, index) is None:
                continue
            link_name = os.path.join(
                objects_dir + '-' + index, fix_key(str(getattr(obj, index)))
            )
            try:
                os.unlink(link_name)
            except OSError:
                pass  # best effort: the index entry may already be gone
        indexes_root = os.path.join(objects_dir, '.indexes')
        for index in cls._hashed_indexes or []:
            values = getattr(obj, index)
            # multi-valued attributes contribute one index file per value
            if type(values) not in (tuple, list, set):
                values = [values]
            for value in values:
                index_file = os.path.join(
                    indexes_root, index, '%s-%s' % (index, fix_key(value))
                )
                if not os.path.exists(index_file):
                    continue
                # drop our id from the pickled id list, then rewrite it
                ids = [str(x) for x in pickle.load(open(index_file, 'rb'))]
                if str(obj.id) in ids:
                    ids.remove(str(obj.id))
                    # protocol 2 keeps the files python2-compatible
                    pickle.dump(ids, open(index_file, 'wb'), protocol=2)
    os.unlink(os.path.join(objects_dir, fix_key(id)))
|
|
|
|
|
2005-09-05 21:17:22 +02:00
|
|
|
def remove_self(self):
|
2007-01-07 11:16:33 +01:00
|
|
|
self.remove_object(self.id)
|
2011-01-20 10:22:37 +01:00
|
|
|
|
2016-03-11 16:40:16 +01:00
|
|
|
@classmethod
|
2011-01-20 10:22:37 +01:00
|
|
|
def wipe(cls):
|
|
|
|
tmpdir = tempfile.mkdtemp(prefix='wiping', dir=os.path.join(get_publisher().app_dir))
|
|
|
|
dirs_to_move = []
|
|
|
|
objects_dir = cls.get_objects_dir()
|
|
|
|
dirs_to_move.append(objects_dir)
|
|
|
|
for index in cls._indexes or []:
|
|
|
|
index_dir = objects_dir + '-' + index
|
|
|
|
dirs_to_move.append(index_dir)
|
|
|
|
|
|
|
|
for directory in dirs_to_move:
|
|
|
|
if os.path.exists(directory):
|
|
|
|
os.rename(directory, os.path.join(tmpdir, os.path.basename(directory)))
|
|
|
|
|
|
|
|
shutil.rmtree(tmpdir)
|
|
|
|
|
2012-10-04 10:00:58 +02:00
|
|
|
def __repr__(self):
|
|
|
|
if hasattr(self, 'display_name'):
|
|
|
|
display_name = '%r ' % self.display_name
|
|
|
|
elif hasattr(self, 'get_display_name'):
|
|
|
|
display_name = '%r ' % self.get_display_name()
|
2012-10-04 10:33:47 +02:00
|
|
|
elif hasattr(self, 'name'):
|
|
|
|
display_name = '%r ' % self.name
|
2012-10-04 10:00:58 +02:00
|
|
|
else:
|
|
|
|
display_name = ''
|
|
|
|
return '<%s %sid:%s>' % (self.__class__.__name__, display_name, self.id)
|