# wcs/wcs/qommon/storage.py
#
# Filesystem-backed pickle storage for w.c.s. objects.
import errno
import os
import pickle
import time

from quixote import get_publisher
def lax_int(s):
    """Convert *s* to an int, returning -1 when conversion is impossible.

    Used to extract numeric ids from directory listings, where some
    entries may not be numeric at all.  TypeError is caught as well as
    ValueError so non-string inputs (e.g. None) also degrade to -1
    instead of raising.
    """
    try:
        return int(s)
    except (ValueError, TypeError):
        return -1
def fix_key(k):
    """Turn *k* into a string safe to use as a filename.

    Falsy keys (None, '', 0) are returned unchanged; anything else is
    stringified with '/' characters replaced by '-' so the key can be
    used as a name inside the objects directory.
    """
    if not k:
        return k
    return '-'.join(str(k).split('/'))
class StorableObject(object):
    """Base class for objects persisted as pickle files on disk.

    Objects are stored under <app_dir>/<table name>/<id> (see
    get_objects_dir / get_object_filename).  Two optional index
    mechanisms exist: _indexes maintains per-attribute symlink
    directories, _hashed_indexes maintains pickled lists of ids under
    the objects directory's '.indexes' subdirectory.
    """
    # attribute names indexed through symlinks in '<table>-<index>' dirs
    _indexes = None
    # attribute names indexed through pickled id lists in '.indexes'
    _hashed_indexes = None
    _filename = None # None, unless must be saved to a specific location
def __init__(self, id = None):
self.id = id
if get_publisher() and not self.id:
self.id = self.get_new_id()
def get_table_name(cls):
return cls._names
get_table_name = classmethod(get_table_name)
def get_objects_dir(cls):
return os.path.join(get_publisher().app_dir, cls.get_table_name())
get_objects_dir = classmethod(get_objects_dir)
def keys(cls):
if not os.path.exists(cls.get_objects_dir()):
return []
return [fix_key(x) for x in os.listdir(cls.get_objects_dir()) if x[0] != '.']
keys = classmethod(keys)
def values(cls, ignore_errors = False):
values = [cls.get(x, ignore_errors = ignore_errors) for x in cls.keys()]
return [x for x in values if x is not None]
values = classmethod(values)
def items(cls):
return [(x, cls.get(x)) for x in cls.keys()]
items = classmethod(items)
def count(cls):
return len(cls.keys())
count = classmethod(count)
def select(cls, clause = None, order_by = None, ignore_errors = False, limit = None):
keys = cls.keys()
if limit and len(keys) > limit:
keys = keys[:limit+1]
objects = [cls.get(k, ignore_errors = ignore_errors) for k in keys]
if ignore_errors:
objects = [x for x in objects if x is not None]
if clause:
objects = [x for x in objects if clause(x)]
if order_by:
order_by = str(order_by)
if order_by[0] == '-':
reverse = True
order_by = order_by[1:]
else:
reverse = False
objects.sort(lambda x,y: cmp(getattr(x, order_by), getattr(y, order_by)))
if reverse:
objects.reverse()
return objects
select = classmethod(select)
def has_key(cls, id):
return fix_key(id) in cls.keys()
has_key = classmethod(has_key)
def get_new_id(cls):
keys = cls.keys()
if not keys:
id = 1
else:
id = max([lax_int(x) for x in keys]) + 1
if id == 0:
id = len(keys)+1
return id
get_new_id = classmethod(get_new_id)
def get(cls, id, ignore_errors = False):
if id is None:
if ignore_errors:
return None
else:
raise KeyError()
filename = os.path.join(cls.get_objects_dir(), fix_key(id))
return cls.get_filename(filename, ignore_errors = ignore_errors)
get = classmethod(get)
def get_on_index(cls, id, index):
if not cls._indexes:
raise KeyError()
objects_dir = cls.get_objects_dir()
index_dir = objects_dir + '-' + index
if not os.path.exists(index_dir):
cls.rebuild_indexes()
filename = os.path.join(index_dir, str(id))
return cls.get_filename(filename)
get_on_index = classmethod(get_on_index)
def get_with_indexed_value(cls, index, value, ignore_errors = False):
objects_dir = cls.get_objects_dir()
index_dir = os.path.join(objects_dir, '.indexes')
index_file = os.path.join(index_dir, '%s-%s' % (index, value))
if not os.path.exists(index_dir):
cls.rebuild_indexes()
if not os.path.exists(index_file):
return []
ids = pickle.load(file(index_file))
objects = [cls.get(x, ignore_errors = ignore_errors) for x in ids]
return objects
get_with_indexed_value = classmethod(get_with_indexed_value)
def get_filename(cls, filename, ignore_errors = False):
if get_publisher() and get_publisher().unpickler_class:
unpickler = get_publisher().unpickler_class
else:
unpickler = pickle.Unpickler
try:
o = unpickler(file(filename)).load()
except IOError:
if ignore_errors:
return None
raise KeyError()
except (EOFError, ImportError), e:
if ignore_errors:
return None
raise KeyError()
o._names = cls._names
if hasattr(cls, 'migrate'):
o.migrate()
return o
get_filename = classmethod(get_filename)
def rebuild_indexes(cls):
if not (cls._indexes or cls._hashed_indexes):
return
objects_dir = cls.get_objects_dir()
hashed_indexes = {}
for index in cls._hashed_indexes or []:
index_dir = os.path.join(objects_dir, '.indexes')
if not os.path.exists(index_dir):
os.makedirs(index_dir)
for object in cls.values(ignore_errors = True):
object_filename = os.path.join(objects_dir, fix_key(object.id))
relative_object_filename = os.path.join('..', cls.get_table_name(), fix_key(object.id))
for index in cls._indexes or []:
if not hasattr(object, index) or getattr(object, index) is None:
continue
index_dir = objects_dir + '-' + index
link_name = os.path.join(index_dir, str(getattr(object, index)))
try:
if relative_object_filename:
os.symlink(relative_object_filename, link_name)
else:
os.symlink(object_filename, link_name)
except OSError, exc:
if exc.errno == 2:
os.mkdir(index_dir)
elif exc.errno == 17:
os.unlink(link_name)
else:
raise
if relative_object_filename:
os.symlink(relative_object_filename, link_name)
else:
os.symlink(object_filename, link_name)
for index in cls._hashed_indexes or []:
if not hasattr(object, index) or getattr(object, index) is None:
continue
index_name = '%s-%s' % (index, getattr(object, index))
if not index_name in hashed_indexes:
hashed_indexes[index_name] = []
hashed_indexes[index_name].append(object.id)
for index, content in hashed_indexes.items():
index_file = os.path.join(objects_dir, '.indexes', index)
pickle.dump(content, file(index_file, 'w'))
rebuild_indexes = classmethod(rebuild_indexes)
def get_object_filename(self):
if self._filename:
if self._filename[0] == '/':
return self._filename
else:
return os.path.join(get_publisher().app_dir, self._filename)
else:
objects_dir = self.get_objects_dir()
return os.path.join(objects_dir, fix_key(self.id))
def store(self):
objects_dir = self.get_objects_dir()
if self._filename:
if self._filename[0] == '/':
object_filename = self._filename
relative_object_filename = None
else:
object_filename = os.path.join(get_publisher().app_dir, self._filename)
relative_object_filename = os.path.join('..', self._filename)
else:
if not os.path.exists(objects_dir):
os.mkdir(objects_dir)
object_filename = os.path.join(objects_dir, fix_key(self.id))
relative_object_filename = os.path.join('..', self.get_table_name(), fix_key(self.id))
if self._indexes or self._hashed_indexes:
previous_object_value = self.get_filename(object_filename, True)
s = pickle.dumps(self)
open(object_filename, 'w').write(s)
rebuilt_indexes = False
for index in self._indexes or []:
if not hasattr(self, index) or getattr(self, index) is None:
continue
index_dir = objects_dir + '-' + index
link_name = os.path.join(index_dir, str(getattr(self, index)))
if previous_object_value:
old_link_name = os.path.join(index_dir,
str(getattr(previous_object_value, index)))
if os.path.exists(old_link_name):
if old_link_name == link_name:
continue
os.unlink(old_link_name)
try:
if relative_object_filename:
os.symlink(relative_object_filename, link_name)
else:
os.symlink(object_filename, link_name)
except OSError, exc:
if exc.errno == 2:
os.mkdir(index_dir)
if not rebuilt_indexes:
# perhaps index dir got removed; rebuild it before
# adding elements to it.
self.rebuild_indexes()
rebuilt_indexes = True
elif exc.errno == 17:
os.unlink(link_name)
else:
raise
if not rebuilt_indexes:
if relative_object_filename:
os.symlink(relative_object_filename, link_name)
else:
os.symlink(object_filename, link_name)
for index in self._hashed_indexes or []:
index_dir = os.path.join(objects_dir, '.indexes')
if not os.path.exists(index_dir):
os.makedirs(index_dir)
index_name = '%s-%s' % (index, getattr(self, index))
if previous_object_value:
old_index_name = '%s-%s' % (index, getattr(previous_object_value, index))
if old_index_name == index_name:
continue
old_index_file = os.path.join(index_dir, old_index_name)
if os.path.exists(old_index_file):
ids = pickle.load(file(old_index_file))
if self.id in ids:
ids.remove(self.id)
pickle.dump(ids, file(old_index_file, 'w'))
index_file = os.path.join(index_dir, index_name)
if os.path.exists(index_file):
ids = pickle.load(file(index_file))
else:
ids = []
if not self.id in ids:
ids.append(self.id)
pickle.dump(ids, file(index_file, 'w'))
def volatile(cls):
o = cls()
o.id = None
return o
volatile = classmethod(volatile)
def remove_object(cls, id):
objects_dir = cls.get_objects_dir()
if cls._indexes or cls._hashed_indexes:
object = cls.get(id)
for index in cls._indexes or []:
if not hasattr(object, index) or getattr(object, index) is None:
continue
index_dir = objects_dir + '-' + index
link_name = os.path.join(index_dir, str(getattr(object, index)))
try:
os.unlink(link_name)
except OSError:
pass
index_dir = os.path.join(objects_dir, '.indexes')
for index in cls._hashed_indexes or []:
index_name = '%s-%s' % (index, getattr(object, index))
index_file = os.path.join(index_dir, index_name)
if os.path.exists(index_file):
ids = pickle.load(file(index_file))
if object.id in ids:
ids.remove(object.id)
pickle.dump(ids, file(index_file, 'w'))
os.unlink(os.path.join(objects_dir, fix_key(id)))
remove_object = classmethod(remove_object)
def remove_self(self):
self.remove_object(self.id)
def last_modified_id(cls, id):
filename = os.path.join(cls.get_objects_dir(), fix_key(id))
mtime = 0
try:
stat = os.stat(filename)
mtime = stat.st_mtime
except OSError:
mtime = int(time.time())
return mtime
last_modified_id = classmethod(last_modified_id)
def last_modified(cls):
mtime = 0
try:
stat = os.stat(cls.get_objects_dir())
mtime = stat.st_mtime
except OSError:
mtime = int(time.time())
return mtime
last_modified = classmethod(last_modified)