2520 lines
96 KiB
Python
2520 lines
96 KiB
Python
# -*- coding: utf-8 -*-
|
|
# chrono - agendas system
|
|
# Copyright (C) 2016 Entr'ouvert
|
|
#
|
|
# This program is free software: you can redistribute it and/or modify it
|
|
# under the terms of the GNU Affero General Public License as published
|
|
# by the Free Software Foundation, either version 3 of the License, or
|
|
# (at your option) any later version.
|
|
#
|
|
# This program is distributed in the hope that it will be useful,
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
# GNU Affero General Public License for more details.
|
|
#
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
import collections
|
|
import copy
|
|
import datetime
|
|
import functools
|
|
import itertools
|
|
import math
|
|
import sys
|
|
import uuid
|
|
|
|
import django
|
|
import requests
|
|
import vobject
|
|
from dateutil.rrule import DAILY, WEEKLY, rrule, rruleset
|
|
from django.conf import settings
|
|
from django.contrib.auth.models import Group
|
|
from django.contrib.postgres.fields import ArrayField
|
|
from django.core.exceptions import FieldDoesNotExist, ValidationError
|
|
from django.core.validators import MaxValueValidator, MinValueValidator
|
|
from django.db import connection, models, transaction
|
|
from django.db.models import Case, Count, Q, When
|
|
from django.template import Context, Template, TemplateSyntaxError, VariableDoesNotExist, engines
|
|
from django.urls import reverse
|
|
from django.utils import functional
|
|
from django.utils.dates import WEEKDAYS
|
|
from django.utils.encoding import force_text
|
|
from django.utils.formats import date_format
|
|
from django.utils.functional import cached_property
|
|
from django.utils.module_loading import import_string
|
|
from django.utils.text import slugify
|
|
from django.utils.timezone import is_aware, localtime, make_aware, make_naive, now, utc
|
|
from django.utils.translation import ugettext
|
|
from django.utils.translation import ugettext_lazy as _
|
|
from django.utils.translation import ungettext
|
|
from jsonfield import JSONField
|
|
|
|
from chrono.interval import Interval, IntervalSet
|
|
from chrono.utils.requests_wrapper import requests as requests_wrapper
|
|
|
|
# Agenda kinds: 'events' agendas hold bookable Event objects, 'meetings'
# agendas hold desks/meeting types, 'virtual' agendas aggregate several
# real 'meetings' agendas.
AGENDA_KINDS = (
    ('events', _('Events')),
    ('meetings', _('Meetings')),
    ('virtual', _('Virtual')),
)

# Views available as an agenda's default management view.
AGENDA_VIEWS = (
    ('day', _('Day view')),
    ('month', _('Month view')),
    ('open_events', _('Open events')),
)
|
|
|
|
|
|
def is_midnight(dtime):
    """Return True when *dtime*, once converted to local time, is exactly midnight."""
    local = localtime(dtime)
    return (local.hour, local.minute) == (0, 0)
|
|
|
|
|
|
def generate_slug(instance, seen_slugs=None, **query_filters):
    """Build a unique slug for *instance* from its base_slug.

    An increasing numeric suffix is appended until a free slug is found.
    When *seen_slugs* is provided it acts as a cache to skip one database
    query per already-known candidate; the chosen slug is added to it.
    *query_filters* restricts the uniqueness scope (e.g. per agenda).
    """
    manager = instance._meta.model.objects
    base_slug = instance.base_slug
    candidate = base_slug
    counter = 1

    if seen_slugs is None:
        # No cache available: hit the database for every candidate.
        while manager.filter(slug=candidate, **query_filters).exists():
            candidate = '%s-%s' % (base_slug, counter)
            counter += 1
        return candidate

    # Cached mode: only query the database for slugs not seen yet.
    while True:
        if candidate not in seen_slugs:
            # double-check in the database before accepting the slug
            if not manager.filter(slug=candidate, **query_filters).exists():
                break
        candidate = '%s-%s' % (base_slug, counter)
        counter += 1
    seen_slugs.add(candidate)
    return candidate
|
|
|
|
|
|
def clean_import_data(cls, data):
    """Return a copy of *data* suitable for creating/updating a *cls* row.

    Keys that do not match a model field are dropped, as are many-to-many
    fields (they must be set after update_or_create).  The slug, when
    present, is validated; AgendaImportError is raised on a bad format.
    """
    cleaned_data = copy.deepcopy(data)
    for param in data:
        try:
            field = cls._meta.get_field(param)
        except FieldDoesNotExist:
            # drop keys the model does not know about
            del cleaned_data[param]
            continue
        if field.many_to_many:
            # m2m relations are handled separately, after update_or_create
            del cleaned_data[param]
            continue
        if param == 'slug':
            value = cleaned_data[param]
            try:
                field.run_validators(value)
            except ValidationError:
                raise AgendaImportError(_('Bad slug format "%s"') % value)
    return cleaned_data
|
|
|
|
|
|
def validate_not_digit(value):
    """Field validator rejecting values made only of digits."""
    if not value.isdigit():
        return
    raise ValidationError(_('This value cannot be a number.'))
|
|
|
|
|
|
def django_template_validator(value):
    """Field validator ensuring *value* compiles as a Django template."""
    engine = engines['django']
    try:
        engine.from_string(value)
    except TemplateSyntaxError as exc:
        raise ValidationError(_('syntax error: %s') % exc)
|
|
|
|
|
|
class ICSError(Exception):
    """Error raised while handling ICS data."""

    pass
|
|
|
|
|
|
class AgendaImportError(Exception):
    """Error raised when an agenda JSON import payload is invalid."""

    pass
|
|
|
|
|
|
class Agenda(models.Model):
    """An agenda, the top-level booking container.

    The kind field selects the behaviour: 'events' agendas hold Event
    objects, 'meetings' agendas hold desks and meeting types, 'virtual'
    agendas aggregate several real agendas through VirtualMember.
    """

    label = models.CharField(_('Label'), max_length=150)
    slug = models.SlugField(_('Identifier'), max_length=160, unique=True)
    kind = models.CharField(_('Kind'), max_length=20, choices=AGENDA_KINDS, default='events')
    # bookings are refused before now + minimal_booking_delay days
    # (defaulted to 1 in save() for non-virtual agendas)
    minimal_booking_delay = models.PositiveIntegerField(
        _('Minimal booking delay (in days)'),
        default=None,
        null=True,
        blank=True,
        validators=[MaxValueValidator(10000)],
    )
    # bookings are refused after now + maximal_booking_delay days
    # (defaulted to 8 * 7 in save() for non-virtual agendas)
    maximal_booking_delay = models.PositiveIntegerField(
        _('Maximal booking delay (in days)'),
        default=None,
        null=True,
        blank=True,
        validators=[MaxValueValidator(10000)],
    )  # eight weeks
    anonymize_delay = models.PositiveIntegerField(
        _('Anonymize delay (in days)'),
        default=None,
        null=True,
        blank=True,
        validators=[MinValueValidator(30), MaxValueValidator(1000)],
        help_text=_('User data will be kept for the specified number of days passed the booking date.'),
    )
    # members of a virtual agenda; only meaningful when kind == 'virtual'
    real_agendas = models.ManyToManyField(
        'self',
        related_name='virtual_agendas',
        symmetrical=False,
        through='VirtualMember',
        through_fields=('virtual_agenda', 'real_agenda'),
    )
    edit_role = models.ForeignKey(
        Group,
        blank=True,
        null=True,
        default=None,
        related_name='+',
        verbose_name=_('Edit Role'),
        on_delete=models.SET_NULL,
    )
    view_role = models.ForeignKey(
        Group,
        blank=True,
        null=True,
        default=None,
        related_name='+',
        verbose_name=_('View Role'),
        on_delete=models.SET_NULL,
    )
    resources = models.ManyToManyField('Resource')
    category = models.ForeignKey(
        'Category', verbose_name=_('Category'), blank=True, null=True, on_delete=models.SET_NULL
    )
    absence_reasons_group = models.ForeignKey(
        'AbsenceReasonGroup',
        verbose_name=_('Absence reasons group'),
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
    )
    default_view = models.CharField(_('Default view'), max_length=20, choices=AGENDA_VIEWS, default='month')
    # rendered as a Django template, see get_booking_form_url()
    booking_form_url = models.CharField(
        _('Booking form URL'), max_length=200, blank=True, validators=[django_template_validator]
    )
    # True when all desks are interchangeable, see
    # is_available_for_simple_management()
    desk_simple_management = models.BooleanField(default=False)

    class Meta:
        ordering = ['label']
|
|
|
|
    def __str__(self):
        # agendas are displayed by their label
        return self.label
|
|
|
|
    def save(self, *args, **kwargs):
        """Fill defaults before saving, then create the exceptions holder.

        Non-virtual agendas get default booking delays (1 day minimum,
        eight weeks maximum); new non-events agendas default to the day
        view.  On first save of an events agenda, a '_exceptions_holder'
        desk is created and seeded from settings-defined exceptions.
        """
        created = bool(not self.pk)
        if not self.slug:
            self.slug = generate_slug(self)
        if self.kind != 'virtual':
            if self.minimal_booking_delay is None:
                self.minimal_booking_delay = 1
            if self.maximal_booking_delay is None:
                self.maximal_booking_delay = 8 * 7  # eight weeks
        if self.kind != 'events' and self.pk is None:
            # meetings/virtual agendas start on the day view
            self.default_view = 'day'
        super(Agenda, self).save(*args, **kwargs)
        if created and self.kind == 'events':
            # hidden desk holding recurrence exceptions for events agendas
            desk = Desk.objects.create(agenda=self, slug='_exceptions_holder')
            desk.import_timeperiod_exceptions_from_settings()
|
|
|
|
    @property
    def base_slug(self):
        # starting point for generate_slug()
        return slugify(self.label)
|
|
|
|
def get_absolute_url(self):
|
|
return reverse('chrono-manager-agenda-view', kwargs={'pk': self.id})
|
|
|
|
def can_be_managed(self, user):
|
|
if user.is_staff:
|
|
return True
|
|
group_ids = [x.id for x in user.groups.all()]
|
|
return bool(self.edit_role_id in group_ids)
|
|
|
|
def can_be_viewed(self, user):
|
|
if self.can_be_managed(user):
|
|
return True
|
|
group_ids = [x.id for x in user.groups.all()]
|
|
return bool(self.view_role_id in group_ids)
|
|
|
|
def accept_meetings(self):
|
|
if self.kind == 'virtual':
|
|
return not self.real_agendas.filter(~Q(kind='meetings')).exists()
|
|
return self.kind == 'meetings'
|
|
|
|
def get_real_agendas(self):
|
|
if self.kind == 'virtual':
|
|
return self.real_agendas.all()
|
|
return [self]
|
|
|
|
    @cached_property
    def cached_meetingtypes(self):
        # memoized list of meeting types, see iter_meetingtypes()
        return list(self.iter_meetingtypes())
|
|
|
|
    def iter_meetingtypes(self, excluded_agenda=None):
        """Expose agenda's meeting types.

        Straightforward on a real agenda.

        On a virtual agenda we expose transient meeting types based on
        the real ones shared by every real agenda; *excluded_agenda* is
        left out of the computation (used by VirtualMember.clean() when
        validating a candidate member).
        """
        if self.kind == 'virtual':
            base_qs = MeetingType.objects.filter(agenda__virtual_agendas__in=[self], deleted=False)
            real_agendas = self.real_agendas
            if excluded_agenda:
                base_qs = base_qs.exclude(agenda=excluded_agenda)
                real_agendas = real_agendas.exclude(pk=excluded_agenda.pk)
            # keep only (slug, duration, label) triples present in every real
            # agenda: the per-triple count must match the number of agendas
            queryset = (
                base_qs.values('slug', 'duration', 'label')
                .annotate(total=Count('*'))
                .filter(total=real_agendas.count())
            )
            # transient, unsaved MeetingType instances
            return [
                MeetingType(duration=mt['duration'], label=mt['label'], slug=mt['slug'])
                for mt in queryset.order_by('slug')
            ]

        return self.meetingtype_set.filter(deleted=False).all().order_by('slug')
|
|
|
|
def get_meetingtype(self, id_=None, slug=None):
|
|
match = id_ or slug
|
|
assert match, 'an identifier or a slug should be specified'
|
|
|
|
if self.kind == 'virtual':
|
|
match = id_ or slug
|
|
meeting_type = None
|
|
for mt in self.cached_meetingtypes:
|
|
if mt.slug == match:
|
|
meeting_type = mt
|
|
break
|
|
if meeting_type is None:
|
|
raise MeetingType.DoesNotExist()
|
|
return meeting_type
|
|
|
|
if id_:
|
|
return MeetingType.objects.get(id=id_, agenda=self, deleted=False)
|
|
return MeetingType.objects.get(slug=slug, agenda=self, deleted=False)
|
|
|
|
    def get_virtual_members(self):
        # through rows linking this virtual agenda to its real agendas
        return VirtualMember.objects.filter(virtual_agenda=self)
|
|
|
|
def get_max_meeting_duration(self):
|
|
return max(x.duration for x in self.cached_meetingtypes)
|
|
|
|
def get_base_meeting_duration(self):
|
|
durations = [x.duration for x in self.cached_meetingtypes]
|
|
if not durations:
|
|
raise ValueError()
|
|
gcd = durations[0]
|
|
for duration in durations[1:]:
|
|
gcd = math.gcd(duration, gcd)
|
|
if gcd == 0:
|
|
raise ValueError()
|
|
return gcd
|
|
|
|
    def export_json(self):
        """Serialize the agenda and its kind-specific related objects to a dict.

        Roles are exported by name, category/resources/absence reasons
        group by slug; the counterpart is import_json().
        """
        agenda = {
            'label': self.label,
            'slug': self.slug,
            'kind': self.kind,
            'category': self.category.slug if self.category else None,
            'minimal_booking_delay': self.minimal_booking_delay,
            'maximal_booking_delay': self.maximal_booking_delay,
            'permissions': {
                'view': self.view_role.name if self.view_role else None,
                'edit': self.edit_role.name if self.edit_role else None,
            },
            'resources': [x.slug for x in self.resources.all()],
        }
        if hasattr(self, 'reminder_settings'):
            agenda['reminder_settings'] = self.reminder_settings.export_json()
        if self.kind == 'events':
            agenda['default_view'] = self.default_view
            agenda['booking_form_url'] = self.booking_form_url
            # recurrence instances (primary_event set) are not exported
            agenda['events'] = [x.export_json() for x in self.event_set.filter(primary_event__isnull=True)]
            if hasattr(self, 'notifications_settings'):
                agenda['notifications_settings'] = self.notifications_settings.export_json()
            agenda['absence_reasons_group'] = (
                self.absence_reasons_group.slug if self.absence_reasons_group else None
            )
            # the single desk of an events agenda is the exceptions holder
            agenda['exceptions_desk'] = self.desk_set.get().export_json()
        elif self.kind == 'meetings':
            agenda['meetingtypes'] = [x.export_json() for x in self.meetingtype_set.filter(deleted=False)]
            agenda['desks'] = [desk.export_json() for desk in self.desk_set.all()]
            agenda['desk_simple_management'] = self.desk_simple_management
        elif self.kind == 'virtual':
            agenda['excluded_timeperiods'] = [x.export_json() for x in self.excluded_timeperiods.all()]
            agenda['real_agendas'] = [{'slug': x.slug, 'kind': x.kind} for x in self.real_agendas.all()]
        return agenda
|
|
|
|
    @classmethod
    def import_json(cls, data, overwrite=False):
        """Create or update an agenda from a dict produced by export_json().

        When *overwrite* is True, kind-specific related objects (events,
        meeting types, desks, excluded time periods, settings) are deleted
        before re-import.  Raises AgendaImportError when a referenced
        category, resource, absence reasons group or real agenda is
        missing, or when a virtual member fails validation.  Returns
        whether the agenda was created.
        """
        data = data.copy()
        permissions = data.pop('permissions') or {}
        reminder_settings = data.pop('reminder_settings', None)
        # extract kind-specific payloads before field cleaning
        if data['kind'] == 'events':
            events = data.pop('events')
            notifications_settings = data.pop('notifications_settings', None)
            exceptions_desk = data.pop('exceptions_desk', None)
        elif data['kind'] == 'meetings':
            meetingtypes = data.pop('meetingtypes')
            desks = data.pop('desks')
        elif data['kind'] == 'virtual':
            excluded_timeperiods = data.pop('excluded_timeperiods')
            real_agendas = data.pop('real_agendas')
        # resolve role names into Group objects
        for permission in ('view', 'edit'):
            if permissions.get(permission):
                data[permission + '_role'] = Group.objects.get(name=permissions[permission])
        resources_slug = data.pop('resources', [])
        resources_by_slug = {r.slug: r for r in Resource.objects.filter(slug__in=resources_slug)}
        for resource_slug in resources_slug:
            if resource_slug not in resources_by_slug:
                raise AgendaImportError(_('Missing "%s" resource') % resource_slug)
        data = clean_import_data(cls, data)
        # applied at the end, only if the desks allow it (see below)
        desk_simple_management = data.pop('desk_simple_management', None)
        if data.get('category'):
            try:
                data['category'] = Category.objects.get(slug=data['category'])
            except Category.DoesNotExist:
                raise AgendaImportError(_('Missing "%s" category') % data['category'])
        if data.get('absence_reasons_group'):
            try:
                data['absence_reasons_group'] = AbsenceReasonGroup.objects.get(
                    slug=data['absence_reasons_group']
                )
            except AbsenceReasonGroup.DoesNotExist:
                raise AgendaImportError(
                    _('Missing "%s" absence reasons group') % data['absence_reasons_group']
                )
        agenda, created = cls.objects.get_or_create(slug=data['slug'], defaults=data)
        if not created:
            # NOTE(review): updated attributes are set here but no
            # unconditional agenda.save() is visible in this chunk —
            # confirm persistence of updates on existing agendas.
            for k, v in data.items():
                setattr(agenda, k, v)
        if overwrite:
            AgendaReminderSettings.objects.filter(agenda=agenda).delete()
        if reminder_settings:
            reminder_settings['agenda'] = agenda
            AgendaReminderSettings.import_json(reminder_settings)
        if data['kind'] == 'events':
            if overwrite:
                Event.objects.filter(agenda=agenda).delete()
                AgendaNotificationsSettings.objects.filter(agenda=agenda).delete()
            for event_data in events:
                event_data['agenda'] = agenda
                Event.import_json(event_data)
            if notifications_settings:
                notifications_settings['agenda'] = agenda
                AgendaNotificationsSettings.import_json(notifications_settings)
            if exceptions_desk:
                exceptions_desk['agenda'] = agenda
                Desk.import_json(exceptions_desk)
        elif data['kind'] == 'meetings':
            if overwrite:
                MeetingType.objects.filter(agenda=agenda).delete()
                Desk.objects.filter(agenda=agenda).delete()
            for type_data in meetingtypes:
                type_data['agenda'] = agenda
                MeetingType.import_json(type_data)
            for desk in desks:
                desk['agenda'] = agenda
                Desk.import_json(desk)
            agenda.resources.set(resources_by_slug.values())
        elif data['kind'] == 'virtual':
            if overwrite:
                TimePeriod.objects.filter(agenda=agenda).delete()
                VirtualMember.objects.filter(virtual_agenda=agenda).delete()
            for excluded_timeperiod in excluded_timeperiods:
                excluded_timeperiod['agenda'] = agenda
                TimePeriod.import_json(excluded_timeperiod)
            for real_agenda in real_agendas:
                try:
                    real_agenda = Agenda.objects.get(slug=real_agenda['slug'], kind=real_agenda['kind'])
                except Agenda.DoesNotExist:
                    raise AgendaImportError(_('The real agenda "%s" does not exist.') % real_agenda['slug'])
                try:
                    # NOTE(review): this reassigns the outer 'created' flag,
                    # which is the method's return value — confirm intended.
                    vm, created = VirtualMember.objects.get_or_create(
                        virtual_agenda=agenda, real_agenda=real_agenda
                    )
                    vm.clean()
                except ValidationError as exc:
                    raise AgendaImportError(' '.join(exc.messages))

        # desk_simple_management is only turned on when all desks are
        # actually interchangeable
        if data['kind'] == 'meetings' and desk_simple_management is not None:
            if desk_simple_management is True and not agenda.desk_simple_management:
                if agenda.is_available_for_simple_management():
                    agenda.desk_simple_management = True
                    agenda.save()
            elif desk_simple_management is False and agenda.desk_simple_management:
                agenda.desk_simple_management = False
                agenda.save()

        return created
|
|
|
|
def duplicate(self, label=None):
|
|
# clone current agenda
|
|
new_agenda = copy.deepcopy(self)
|
|
new_agenda.pk = None
|
|
new_agenda.label = label or _('Copy of %s') % self.label
|
|
# reset slug
|
|
new_agenda.slug = None
|
|
new_agenda.save()
|
|
|
|
# clone related objects
|
|
if self.kind == 'meetings':
|
|
for meeting_type in self.meetingtype_set.all():
|
|
meeting_type.duplicate(agenda_target=new_agenda)
|
|
for desk in self.desk_set.all():
|
|
desk.duplicate(agenda_target=new_agenda)
|
|
new_agenda.resources.set(self.resources.all())
|
|
elif self.kind == 'events':
|
|
for event in self.event_set.all():
|
|
event.duplicate(agenda_target=new_agenda)
|
|
elif self.kind == 'virtual':
|
|
for timeperiod in self.excluded_timeperiods.all():
|
|
timeperiod.duplicate(agenda_target=new_agenda)
|
|
for real_agenda in self.real_agendas.all():
|
|
VirtualMember.objects.create(virtual_agenda=new_agenda, real_agenda=real_agenda)
|
|
return new_agenda
|
|
|
|
def get_effective_time_periods(self):
|
|
"""Regroup timeperiods by desks.
|
|
|
|
List all timeperiods, timeperiods having the same begin_time and
|
|
end_time are regrouped in a SharedTimePeriod object, which has a
|
|
list of desks instead of only one desk.
|
|
"""
|
|
if self.kind == 'virtual':
|
|
return self.get_effective_time_periods_virtual()
|
|
elif self.kind == 'meetings':
|
|
return self.get_effective_time_periods_meetings()
|
|
else:
|
|
raise ValueError('does not work with kind %r' % self.kind)
|
|
|
|
    def get_effective_time_periods_meetings(self):
        """List timeperiod instances for all desks of the agenda, convert them
        into an Interval of WeekTime which can be compared and regrouped using
        itertools.groupby.
        """
        # the order_by() must match the groupby key so equal intervals
        # end up adjacent
        yield from (
            SharedTimePeriod.from_weektime_interval(
                weektime_interval,
                desks=[time_period.desk for time_period in time_periods],
            )
            for weektime_interval, time_periods in itertools.groupby(
                TimePeriod.objects.filter(desk__agenda=self)
                .prefetch_related('desk')
                .order_by('weekday', 'start_time', 'end_time'),
                key=TimePeriod.as_weektime_interval,
            )
        )
|
|
|
|
    def get_effective_time_periods_virtual(self):
        """List timeperiod instances for all desks of all real agendas of this
        virtual agenda, convert them into an Interval of WeekTime which can be
        compared and regrouped using itertools.groupby.

        The virtual agenda's excluded time periods are subtracted from
        every resulting interval.
        """
        closed_hours_by_days = IntervalSet.from_ordered(
            [
                time_period.as_weektime_interval()
                for time_period in self.excluded_timeperiods.order_by('weekday', 'start_time', 'end_time')
            ]
        )
        # ordering must match the groupby key so equal intervals are adjacent
        for time_period_interval, time_periods in itertools.groupby(
            TimePeriod.objects.filter(desk__agenda__virtual_agendas=self)
            .order_by('weekday', 'start_time', 'end_time')
            .prefetch_related('desk'),
            key=lambda tp: tp.as_weektime_interval(),
        ):
            time_periods = list(time_periods)
            desks = [time_period.desk for time_period in time_periods]
            if not closed_hours_by_days:
                yield SharedTimePeriod.from_weektime_interval(time_period_interval, desks=desks)
            else:
                # subtract excluded periods, possibly splitting the interval
                for weektime_interval in IntervalSet.simple(*time_period_interval) - closed_hours_by_days:
                    yield SharedTimePeriod.from_weektime_interval(weektime_interval, desks=desks)
|
|
|
|
    @property
    def max_booking_datetime(self):
        """Upper bound of the booking window, or None without maximal delay.

        The computation anchors on noon — a wall-clock time that always
        exists, even on DST-change days — before adding whole days, then
        truncates to midnight and lets localtime() normalize the offset.
        """
        if self.maximal_booking_delay is None:
            return None

        # middle of today in local time, e.g. 28 Mar 2021 12:00 +01:00
        t = localtime(now()).replace(hour=12, minute=0)
        # advance by maximal_booking_delay days; timedelta arithmetic does
        # not touch the tzinfo even if a DST switch happens in between
        t += datetime.timedelta(days=self.maximal_booking_delay)
        # truncate to midnight of that day; midnight always exists because
        # DST switches happen between 2h and 3h
        t = t.replace(hour=0, minute=0, second=0, microsecond=0)
        # localtime() fixes the UTC offset after the date arithmetic
        return localtime(t)
|
|
|
|
    @property
    def min_booking_datetime(self):
        """Lower bound of the booking window, or None without minimal delay.

        Same noon-anchored computation as max_booking_datetime, using
        minimal_booking_delay.
        """
        if self.minimal_booking_delay is None:
            return None

        # middle of today in local time, e.g. 28 Mar 2021 12:00 +01:00
        t = localtime(now()).replace(hour=12, minute=0)
        # advance by minimal_booking_delay days; timedelta arithmetic does
        # not touch the tzinfo even if a DST switch happens in between
        t += datetime.timedelta(days=self.minimal_booking_delay)
        # truncate to midnight of that day; midnight always exists because
        # DST switches happen between 2h and 3h
        t = t.replace(hour=0, minute=0, second=0, microsecond=0)
        # localtime() fixes the UTC offset after the date arithmetic
        return localtime(t)
|
|
|
|
    def get_open_events(
        self,
        prefetched_queryset=False,
        annotate_queryset=False,
        include_full=True,
        min_start=None,
        max_start=None,
        excluded_user_external_id=None,
    ):
        """Return the agenda's events currently open for booking.

        Past, cancelled (except recurrence instances), unpublished and —
        optionally — full events are excluded, then the result is
        restricted to the agenda's booking window intersected with
        [min_start, max_start).  With prefetched_queryset=True,
        self.prefetched_events is used and start-date filtering happens
        in Python.  Occurrences of recurring events are appended when an
        upper bound is known.
        """
        assert self.kind == 'events'

        if prefetched_queryset:
            entries = self.prefetched_events
        else:
            # recurring events are never opened
            entries = self.event_set.filter(recurrence_rule__isnull=True)
            # exclude canceled events except for event recurrences
            entries = entries.filter(Q(cancelled=False) | Q(primary_event__isnull=False))
            # we never want to allow booking for past events.
            entries = entries.filter(start_datetime__gte=localtime(now()))
            # exclude non published events
            entries = entries.filter(
                Q(publication_date__isnull=True) | Q(publication_date__lte=localtime(now()).date())
            )
            if not include_full:
                entries = entries.filter(Q(full=False) | Q(primary_event__isnull=False))

        # clamp the requested window to the agenda booking delays
        if self.minimal_booking_delay:
            min_start = max(self.min_booking_datetime, min_start) if min_start else self.min_booking_datetime

        if min_start:
            if prefetched_queryset:
                entries = [e for e in entries if e.start_datetime >= min_start]
            else:
                entries = entries.filter(start_datetime__gte=min_start)

        if self.maximal_booking_delay:
            max_start = min(self.max_booking_datetime, max_start) if max_start else self.max_booking_datetime

        if max_start:
            if prefetched_queryset:
                entries = [e for e in entries if e.start_datetime < max_start]
            else:
                entries = entries.filter(start_datetime__lt=max_start)

        if excluded_user_external_id and not prefetched_queryset:
            entries = Event.annotate_queryset_for_user(entries, excluded_user_external_id)

        if annotate_queryset and not prefetched_queryset:
            entries = Event.annotate_queryset(entries)

        # recurrences can only be generated over a bounded period
        if max_start:
            entries = self.add_event_recurrences(
                entries,
                min_start or localtime(now()),
                max_start,
                include_full=include_full,
                prefetched_queryset=prefetched_queryset,
            )

        return entries
|
|
|
|
    def add_event_recurrences(
        self,
        events,
        min_start,
        max_start,
        include_full=True,
        include_cancelled=False,
        prefetched_queryset=False,
    ):
        """Extend *events* with recurring events' occurrences in
        [min_start, max_start), returning a sorted list.

        Occurrences already materialized as Event rows (primary_event
        set) are excluded from the generated ones via their datetime
        slug, so they do not appear twice.
        """
        # datetime slugs of already-materialized occurrences, per primary event
        excluded_datetimes = collections.defaultdict(list)
        for event in events:
            if event.primary_event_id:
                excluded_datetimes[event.primary_event_id].append(event.datetime_slug)

        # apply cancelled/full filtering to the concrete events
        events = [
            e for e in events if (not e.cancelled or include_cancelled) and (not e.full or include_full)
        ]

        if prefetched_queryset:
            recurring_events = self.prefetched_recurring_events
        else:
            recurring_events = self.event_set.filter(recurrence_rule__isnull=False)

        exceptions = self.get_recurrence_exceptions(min_start, max_start)
        for event in recurring_events:
            events.extend(
                event.get_recurrences(
                    min_start, max_start, excluded_datetimes.get(event.pk), exceptions, slug_separator=':'
                )
            )

        # sort by the Event model ordering, pushing None values last
        # (None is not comparable with real values)
        events.sort(
            key=lambda x: [(getattr(x, field) is None, getattr(x, field)) for field in Event._meta.ordering]
        )
        return events
|
|
|
|
def get_booking_form_url(self):
|
|
if not self.booking_form_url:
|
|
return
|
|
template_vars = Context(settings.TEMPLATE_VARS)
|
|
try:
|
|
return Template(self.booking_form_url).render(template_vars)
|
|
except (VariableDoesNotExist, TemplateSyntaxError):
|
|
return
|
|
|
|
    def get_recurrence_exceptions(self, min_start, max_start):
        """Exceptions overlapping [min_start, max_start) for this agenda.

        Exceptions are looked up either directly on the hidden
        '_exceptions_holder' desk, or on unavailability calendars linked
        to that desk.
        """
        return TimePeriodException.objects.filter(
            Q(desk__slug='_exceptions_holder', desk__agenda=self)
            | Q(
                unavailability_calendar__desks__slug='_exceptions_holder',
                unavailability_calendar__desks__agenda=self,
            ),
            start_datetime__lt=max_start,
            end_datetime__gt=min_start,
        )
|
|
|
|
    def prefetch_desks_and_exceptions(self, with_sources=False):
        """Prefetch desks with their time periods and exceptions.

        Fills self.prefetched_desks and, on each desk, a
        prefetched_exceptions list combining desk-level exceptions and
        those of its unavailability calendars.  With *with_sources*, the
        exception sources are prefetched too.  Only 'meetings' and
        'virtual' agendas are supported.
        """
        if self.kind == 'meetings':
            desks = self.desk_set.all()
        elif self.kind == 'virtual':
            # all desks of all member agendas
            desks = (
                Desk.objects.filter(agenda__virtual_agendas=self)
                .select_related('agenda')
                .order_by('agenda', 'label')
            )
        else:
            raise ValueError('does not work with kind %r' % self.kind)

        self.prefetched_desks = desks.prefetch_related('timeperiod_set', 'unavailability_calendars')
        if with_sources:
            self.prefetched_desks = self.prefetched_desks.prefetch_related('timeperiodexceptionsource_set')
        unavailability_calendar_ids = UnavailabilityCalendar.objects.filter(
            desks__in=self.prefetched_desks
        ).values('pk')
        # one query gathering desk-level and calendar-level exceptions
        all_desks_exceptions = TimePeriodException.objects.filter(
            Q(desk__in=self.prefetched_desks) | Q(unavailability_calendar__in=unavailability_calendar_ids)
        )
        # dispatch the exceptions onto their desks, in Python
        for desk in self.prefetched_desks:
            uc_ids = [uc.pk for uc in desk.unavailability_calendars.all()]
            desk.prefetched_exceptions = [
                e
                for e in all_desks_exceptions
                if e.desk_id == desk.pk or e.unavailability_calendar_id in uc_ids
            ]
|
|
|
|
    def is_available_for_simple_management(self):
        """True when all desks of a meetings agenda are interchangeable.

        Desks are compared on their time periods, their manually-created
        exceptions (source-less ones), their exception sources and their
        unavailability calendars; any difference disables simple desk
        management.  Uses prefetched data when
        prefetch_desks_and_exceptions() was called before.
        """
        if self.kind != 'meetings':
            return False

        was_prefetched = False
        if hasattr(self, 'prefetched_desks'):
            desks = self.prefetched_desks
            was_prefetched = True
        else:
            desks = self.desk_set.all()
        if len(desks) < 2:
            # no desk or just one, it's ok
            return True

        # the first desk is the reference all others are compared against
        desk = desks[0]

        def values_list(obj, qs_name, qs, fields, for_exception=False):
            # emulate .values_list(*fields) on either a live queryset or
            # the prefetched objects cache; for_exception keeps only
            # manual exceptions (those without a source)
            if not was_prefetched:
                prefetched_qs = getattr(obj, qs).values_list(*fields)
                if for_exception:
                    prefetched_qs = prefetched_qs.filter(source__isnull=True)
                return prefetched_qs
            values = []
            if for_exception:
                prefetched_qs = obj.prefetched_exceptions
            else:
                prefetched_qs = obj._prefetched_objects_cache.get(qs_name)  # XXX django 1.11 compat
                if prefetched_qs is None:
                    prefetched_qs = obj._prefetched_objects_cache.get(qs)
            for inst in prefetched_qs:
                # queryset is prefetched, fake values_list
                if for_exception and inst.source_id is not None:
                    continue
                values.append(tuple(getattr(inst, f) for f in fields))
            return values

        # the field tuples used to compare desks
        period_fields = ['weekday', 'start_time', 'end_time']
        exception_fields = ['label', 'start_datetime', 'end_datetime']
        source_fields = ['ics_filename', 'ics_url', 'settings_slug', 'enabled']
        desk_time_periods = set(values_list(desk, 'timeperiod', 'timeperiod_set', period_fields))
        desk_exceptions = set(
            values_list(
                desk, 'timeperiodexception', 'timeperiodexception_set', exception_fields, for_exception=True
            )
        )
        desk_sources = set(
            values_list(desk, 'timeperiodexceptionsource', 'timeperiodexceptionsource_set', source_fields)
        )
        desk_unavaibility_calendars = set(
            values_list(desk, 'unavailability_calendars', 'unavailability_calendars', ['pk'])
        )
        for other_desk in desks[1:]:
            # compare time periods
            other_desk_time_periods = set(
                values_list(other_desk, 'timeperiod', 'timeperiod_set', period_fields)
            )
            if desk_time_periods != other_desk_time_periods:
                return False

            # compare exceptions
            other_desk_exceptions = set(
                values_list(
                    other_desk,
                    'timeperiodexception',
                    'timeperiodexception_set',
                    exception_fields,
                    for_exception=True,
                )
            )
            if desk_exceptions != other_desk_exceptions:
                return False

            # compare sources
            other_desk_sources = set(
                values_list(
                    other_desk, 'timeperiodexceptionsource', 'timeperiodexceptionsource_set', source_fields
                )
            )
            if desk_sources != other_desk_sources:
                return False

            # compare unavailability calendars
            other_desk_unavaibility_calendars = set(
                values_list(other_desk, 'unavailability_calendars', 'unavailability_calendars', ['pk'])
            )
            if desk_unavaibility_calendars != other_desk_unavaibility_calendars:
                return False

        return True
|
|
|
|
|
|
class VirtualMember(models.Model):
    """Through model to link virtual agendas to their real agendas.

    Real agendas linked to a virtual agenda MUST all have the same list of
    MeetingType based on their label, slug and duration. It's enforced by
    VirtualMember.clean() and the related management views.
    """

    virtual_agenda = models.ForeignKey(Agenda, on_delete=models.CASCADE, related_name='real_members')
    real_agenda = models.ForeignKey(
        Agenda, on_delete=models.CASCADE, related_name='virtual_members', verbose_name='Agenda'
    )

    class Meta:
        unique_together = (('virtual_agenda', 'real_agenda'),)

    def clean(self):
        """Validate that the real agenda provides exactly the meeting types
        exposed by the virtual agenda, raising ValidationError otherwise."""
        error_msg = [_('This agenda does not have the same meeting types provided by the virtual agenda.')]
        # meeting types shared by the other members, this agenda excluded
        virtual_meetingtypes = self.virtual_agenda.iter_meetingtypes(excluded_agenda=self.real_agenda)
        if not virtual_meetingtypes:
            # no other member constrains the meeting types
            return
        virtual_meetingtypes = set([(mt.label, mt.slug, mt.duration) for mt in virtual_meetingtypes])
        real_meetingtypes = self.real_agenda.iter_meetingtypes()
        real_meetingtypes = set([(mt.label, mt.slug, mt.duration) for mt in real_meetingtypes])
        if virtual_meetingtypes - real_meetingtypes:
            # missing meeting type in real agenda
            for mt in virtual_meetingtypes - real_meetingtypes:
                error_msg += [
                    _(
                        'Meeting type "%(label)s" (%(duration)s minutes) '
                        '(identifier: %(slug)s) does no exist.'
                    )
                    % {'label': mt[0], 'slug': mt[1], 'duration': mt[2]}
                ]
            raise ValidationError(error_msg)
        elif real_meetingtypes - virtual_meetingtypes:
            # extra meeting type in real agenda
            for mt in real_meetingtypes - virtual_meetingtypes:
                error_msg += ['Extra meeting type, "%s".' % mt[0]]
            raise ValidationError(error_msg)
|
|
|
|
|
|
# (weekday number, localized name) pairs ordered by weekday number,
# suitable as model field choices
WEEKDAYS_LIST = sorted(WEEKDAYS.items(), key=lambda x: x[0])
|
|
|
|
|
|
class WeekTime(collections.namedtuple('WeekTime', ['weekday', 'time'])):
    """Representation of a time point in a weekday, ex.: Monday at 5 o'clock."""

    def __repr__(self):
        # localized weekday name and time, e.g. "Monday / 5 a.m."
        return '%s / %s' % (
            force_text(WEEKDAYS[self.weekday]),
            date_format(self.time, 'TIME_FORMAT'),
        )
|
|
|
|
|
|
class TimePeriod(models.Model):
    """A weekly recurring time interval.

    Attached either to a desk (opening hours) or, through the agenda
    foreign key, to a virtual agenda (excluded time period).
    """

    weekday = models.IntegerField(_('Week day'), choices=WEEKDAYS_LIST)
    start_time = models.TimeField(_('Start'))
    end_time = models.TimeField(_('End'))
    # opening-hours periods point to a desk…
    desk = models.ForeignKey('Desk', on_delete=models.CASCADE, null=True)
    # …excluded periods point to a virtual agenda (enforced in save())
    agenda = models.ForeignKey(
        Agenda, on_delete=models.CASCADE, null=True, related_name='excluded_timeperiods'
    )

    class Meta:
        ordering = ['weekday', 'start_time']

    def __str__(self):
        # e.g. "Monday / 9 a.m. → 12 p.m."
        return u'%s / %s → %s' % (
            force_text(WEEKDAYS[self.weekday]),
            date_format(self.start_time, 'TIME_FORMAT'),
            date_format(self.end_time, 'TIME_FORMAT'),
        )

    def save(self, *args, **kwargs):
        # only virtual agendas may carry excluded time periods
        if self.agenda:
            assert self.agenda.kind == 'virtual', "a time period can only reference a virtual agenda"
        super(TimePeriod, self).save(*args, **kwargs)

    @property
    def weekday_str(self):
        # localized weekday name
        return WEEKDAYS[self.weekday]

    @classmethod
    def import_json(cls, data):
        """Idempotent import: match on all fields, create when missing."""
        data = clean_import_data(cls, data)
        cls.objects.update_or_create(defaults=data, **data)

    def export_json(self):
        """Serialize to a dict; times use the HH:MM format."""
        return {
            'weekday': self.weekday,
            'start_time': self.start_time.strftime('%H:%M'),
            'end_time': self.end_time.strftime('%H:%M'),
        }

    def duplicate(self, desk_target=None, agenda_target=None):
        """Clone this time period, optionally reattaching it to another
        desk and/or agenda; return the saved copy."""
        # clone current period
        new_period = copy.deepcopy(self)
        new_period.pk = None
        # set desk
        new_period.desk = desk_target or self.desk
        # set agenda
        new_period.agenda = agenda_target or self.agenda
        # store new period
        new_period.save()

        return new_period

    def as_weektime_interval(self):
        """Interval of WeekTime covering this period, usable as a
        comparison/grouping key."""
        return Interval(
            WeekTime(self.weekday, self.start_time),
            WeekTime(self.weekday, self.end_time),
        )

    def as_shared_timeperiods(self):
        """SharedTimePeriod equivalent holding this period's single desk."""
        return SharedTimePeriod(
            weekday=self.weekday,
            start_time=self.start_time,
            end_time=self.end_time,
            desks=[self.desk],
        )
|
|
|
|
|
|
@functools.total_ordering
class SharedTimePeriod(object):
    """
    Hold a common timeperiod for multiple desks.

    To improve performance when generating meetings slots for virtual
    agendas or agendas with many desks, we deduplicate time-periods between
    all desks of all agendas.

    Deduplication is based on a common key, and implemented through __eq__
    and __lt__ which will be used by itertools.groupby():

        (weekday, start_time, end_time)

    it's done in the deduplicate() classmethod.

    At the level of get_all_slots() timeperiods are re-duplicated if the
    min_datetime,max_datetime of the desk's agendas differ (see the code
    of get_all_slots() for details).
    """

    __slots__ = ['weekday', 'start_time', 'end_time', 'desks']

    def __init__(self, weekday, start_time, end_time, desks):
        self.weekday = weekday
        self.start_time = start_time
        self.end_time = end_time
        # all desks sharing this exact weekly time range
        self.desks = set(desks)

    def __str__(self):
        return u'%s / %s → %s' % (
            force_text(WEEKDAYS[self.weekday]),
            date_format(self.start_time, 'TIME_FORMAT'),
            date_format(self.end_time, 'TIME_FORMAT'),
        )

    def __eq__(self, other):
        # deduplication key: same weekday and same time range (desks ignored)
        return (self.weekday, self.start_time, self.end_time) == (
            other.weekday,
            other.start_time,
            other.end_time,
        )

    def __lt__(self, other):
        # total_ordering derives the remaining comparison operators from this
        return (self.weekday, self.start_time, self.end_time) < (
            other.weekday,
            other.start_time,
            other.end_time,
        )

    def get_time_slots(self, min_datetime, max_datetime, meeting_duration, base_duration):
        """Generate all possible time slots between min_datetime and max_datetime
        of duration meeting_duration minutes and spaced by base_duration
        minutes, i.e.

        compute a list [a,b] -> [c,d] -> ...
        where b-a = meeting_duration and c-a = base_duration.

        We start with the first time following min_datetime and being on
        the same weekday of the current period.

        Then we iterate, advancing by base_duration minutes each time.

        If we cross the end_time of the period or the end of the current day
        (means end_time is midnight), it advances time to self.start_time on
        the next week (same weekday, same start, one week in the future).

        When it crosses end_datetime it stops.

        Generated start_datetime MUST be in the local timezone, and the local
        timezone must not change, as the API needs it to generate stable ids.
        """
        meeting_duration = datetime.timedelta(minutes=meeting_duration)
        duration = datetime.timedelta(minutes=base_duration)

        # first candidate day: same weekday as this period, at or after min_datetime
        real_min_datetime = min_datetime + datetime.timedelta(days=self.weekday - min_datetime.weekday())
        if real_min_datetime < min_datetime:
            real_min_datetime += datetime.timedelta(days=7)

        # make sure datetime in local timezone, it's ABSOLUTELY necessary
        # to have stable event ids in the API.
        real_min_datetime = real_min_datetime.replace(
            hour=12
        )  # so the aware datetime will be in the DST of the day
        event_datetime = make_aware(make_naive(real_min_datetime)).replace(
            hour=self.start_time.hour, minute=self.start_time.minute, second=0, microsecond=0
        )
        # don't start before min_datetime
        event_datetime = max(event_datetime, min_datetime)

        # get slots
        while event_datetime < max_datetime:
            end_time = event_datetime + meeting_duration
            next_time = event_datetime + duration
            if end_time.time() > self.end_time or event_datetime.date() != next_time.date():
                # switch to naive time for day/week changes
                event_datetime = make_naive(event_datetime)
                # back to morning
                event_datetime = event_datetime.replace(
                    hour=self.start_time.hour, minute=self.start_time.minute
                )
                # but next week
                event_datetime += datetime.timedelta(days=7)
                # and re-align to timezone afterwards
                event_datetime = make_aware(event_datetime)
                next_time = event_datetime + duration

            # don't end after max_datetime
            if event_datetime > max_datetime:
                break

            yield event_datetime
            event_datetime = next_time

    @classmethod
    def from_weektime_interval(cls, weektime_interval, desks=()):
        """Build a SharedTimePeriod from a (begin, end) WeekTime interval.

        Both bounds must fall on the same weekday.
        """
        begin, end = weektime_interval
        assert begin.weekday == end.weekday
        return cls(
            weekday=begin.weekday,
            start_time=begin.time,
            end_time=end.time,
            desks=desks,
        )
|
|
|
|
|
|
class MeetingType(models.Model):
    """A kind of meeting offered by a (non-virtual) agenda, with a fixed duration."""

    agenda = models.ForeignKey(Agenda, on_delete=models.CASCADE)
    label = models.CharField(_('Label'), max_length=150)
    slug = models.SlugField(_('Identifier'), max_length=160)
    duration = models.IntegerField(_('Duration (in minutes)'), default=30, validators=[MinValueValidator(1)])
    # soft-deletion flag
    deleted = models.BooleanField(_('Deleted'), default=False)

    class Meta:
        ordering = ['duration', 'label']
        unique_together = ['agenda', 'slug']

    def save(self, *args, **kwargs):
        # meeting types belong to real agendas only
        assert self.agenda.kind != 'virtual', "a meetingtype can't reference a virtual agenda"
        # generate a slug from the label on first save
        if not self.slug:
            self.slug = generate_slug(self, agenda=self.agenda)
        super(MeetingType, self).save(*args, **kwargs)

    @property
    def base_slug(self):
        """Slug candidate derived from the label."""
        return slugify(self.label)

    @classmethod
    def import_json(cls, data):
        """Create or update a meeting type (matched by slug + agenda) from JSON."""
        data = clean_import_data(cls, data)
        cls.objects.update_or_create(slug=data['slug'], agenda=data['agenda'], defaults=data)

    def export_json(self):
        """Serialize to a JSON-compatible dict."""
        return {
            'label': self.label,
            'slug': self.slug,
            'duration': self.duration,
        }

    def duplicate(self, agenda_target=None):
        """Clone this meeting type, returning the saved copy.

        When cloning into another agenda the slug is kept (unique per
        agenda); when cloning within the same agenda a new slug is generated.
        """
        new_meeting_type = copy.deepcopy(self)
        new_meeting_type.pk = None
        if agenda_target:
            new_meeting_type.agenda = agenda_target
        else:
            new_meeting_type.slug = None
        new_meeting_type.save()

        return new_meeting_type
|
|
|
|
|
|
class Event(models.Model):
    """A bookable occurrence in an agenda.

    An event is one of:
    - a standalone occurrence,
    - the template of a recurring series (``repeat``/``recurrence_rule`` set),
    - one generated recurrence of such a template (``primary_event`` set),
    - a booked meeting slot in a meetings agenda (``meeting_type``/``desk`` set).
    """

    REPEAT_CHOICES = [
        ('daily', _('Daily')),
        ('weekly', _('Weekly')),
        ('2-weeks', _('Once every two weeks')),
        ('weekdays', _('Every weekdays (Monday to Friday)')),
    ]

    agenda = models.ForeignKey(Agenda, on_delete=models.CASCADE)
    start_datetime = models.DateTimeField(_('Date/time'))
    repeat = models.CharField(_('Repeat'), max_length=16, blank=True, choices=REPEAT_CHOICES)
    # dateutil-compatible rrule kwargs derived from ``repeat`` (see get_recurrence_rule)
    recurrence_rule = JSONField(_('Recurrence rule'), null=True)
    recurrence_end_date = models.DateField(_('Recurrence end date'), null=True, blank=True)
    # template event this recurrence was generated from
    primary_event = models.ForeignKey('self', null=True, on_delete=models.CASCADE, related_name='recurrences')
    duration = models.PositiveIntegerField(_('Duration (in minutes)'), default=None, null=True, blank=True)
    publication_date = models.DateField(_('Publication date'), blank=True, null=True)
    places = models.PositiveIntegerField(_('Places'))
    waiting_list_places = models.PositiveIntegerField(_('Places in waiting list'), default=0)
    label = models.CharField(
        _('Label'),
        max_length=150,
        null=True,
        blank=True,
        help_text=_('Optional label to identify this date.'),
    )
    slug = models.SlugField(_('Identifier'), max_length=160, blank=True, validators=[validate_not_digit])
    description = models.TextField(
        _('Description'), null=True, blank=True, help_text=_('Optional event description.')
    )
    pricing = models.CharField(_('Pricing'), max_length=150, null=True, blank=True)
    url = models.CharField(_('URL'), max_length=200, null=True, blank=True)
    # denormalized occupancy flags, refreshed by check_full()
    almost_full = models.BooleanField(default=False)
    full = models.BooleanField(default=False)
    cancelled = models.BooleanField(default=False)
    cancellation_scheduled = models.BooleanField(default=False)
    # meetings-agenda slots only
    meeting_type = models.ForeignKey(MeetingType, null=True, on_delete=models.CASCADE)
    desk = models.ForeignKey('Desk', null=True, on_delete=models.CASCADE)
    resources = models.ManyToManyField('Resource')

    # timestamps recording which notifications were already sent
    almost_full_notification_timestamp = models.DateTimeField(null=True, blank=True)
    full_notification_timestamp = models.DateTimeField(null=True, blank=True)
    cancelled_notification_timestamp = models.DateTimeField(null=True, blank=True)

    class Meta:
        ordering = ['agenda', 'start_datetime', 'duration', 'label']
        unique_together = ('agenda', 'slug')

    def __str__(self):
        # fall back on the localized start datetime when no label was given
        if self.label:
            return self.label
        return date_format(localtime(self.start_datetime), format='DATETIME_FORMAT')

    @functional.cached_property
    def cancellation_status(self):
        """Human-readable cancellation state, or None when not cancelled."""
        if self.cancelled:
            return _('Cancelled')
        if self.cancellation_scheduled:
            return _('Cancellation in progress')

    def save(self, seen_slugs=None, *args, **kwargs):
        """Normalize and persist the event.

        Strips seconds/microseconds from start_datetime, refreshes the
        full/almost_full flags, generates a slug when missing (``seen_slugs``
        is forwarded to generate_slug — presumably to avoid duplicates during
        bulk creation, confirm with its implementation) and re-derives
        ``recurrence_rule`` from ``repeat``.
        """
        assert self.agenda.kind != 'virtual', "an event can't reference a virtual agenda"
        # a purely numeric slug would be ambiguous with pk lookups
        assert not (self.slug and self.slug.isdigit()), 'slug cannot be a number'
        self.start_datetime = self.start_datetime.replace(second=0, microsecond=0)
        self.check_full()
        if not self.slug:
            self.slug = generate_slug(self, seen_slugs=seen_slugs, agenda=self.agenda)
        self.recurrence_rule = self.get_recurrence_rule()
        return super(Event, self).save(*args, **kwargs)

    @property
    def base_slug(self):
        """Slug candidate derived from the label (or agenda label when empty)."""
        # label can be empty
        return slugify(self.label or ('%s-event' % self.agenda.label))

    @functional.cached_property
    def main_list_full(self):
        """True when the main (non-waiting) list has no remaining place."""
        return bool(self.booked_places >= self.places)

    def check_full(self):
        """Refresh the denormalized ``full`` and ``almost_full`` flags.

        The event is full when the main list is full and there is no waiting
        list, or when the waiting list itself is full; it is almost full at
        90% of main-list occupancy.
        """
        self.full = bool(
            (self.booked_places >= self.places and self.waiting_list_places == 0)
            or (self.waiting_list_places and self.waiting_list >= self.waiting_list_places)
        )
        self.almost_full = bool(self.booked_places >= 0.9 * self.places)

    def in_bookable_period(self):
        """Return True when the event (or one of its recurrences) can be booked now.

        Checks publication date, the agenda's min/max booking delays, and
        that the event is not in the past.
        """
        if self.publication_date and localtime(now()).date() < self.publication_date:
            return False
        if self.agenda.maximal_booking_delay and self.start_datetime > self.agenda.max_booking_datetime:
            return False
        if self.recurrence_rule is not None:
            # bookable recurrences probably exist
            return True
        if self.agenda.minimal_booking_delay and self.start_datetime < self.agenda.min_booking_datetime:
            return False
        if self.start_datetime < now():
            # past the event date, we may want in the future to allow for some
            # extra late booking but it's forbidden for now.
            return False
        return True

    def is_day_past(self):
        """True when the event's day has started (today or earlier).

        NOTE(review): compares dates without localtime() — presumably
        intentional; confirm the timezone expectation.
        """
        return self.start_datetime.date() <= now().date()

    @staticmethod
    def annotate_queryset(qs):
        """Annotate ``qs`` with booked_places_count and waiting_list_count.

        Both counts ignore cancelled bookings; the Django < 2.0 branch uses
        Count(Case(When(...))) because Count(filter=...) is not available there.
        """
        if django.VERSION < (2, 0):
            return qs.annotate(
                booked_places_count=Count(
                    Case(
                        When(
                            booking__cancellation_datetime__isnull=True,
                            booking__in_waiting_list=False,
                            then='booking',
                        )
                    )
                ),
                waiting_list_count=Count(
                    Case(
                        When(
                            booking__cancellation_datetime__isnull=True,
                            booking__in_waiting_list=True,
                            then='booking',
                        )
                    )
                ),
            )
        else:
            return qs.annotate(
                booked_places_count=Count(
                    'booking',
                    filter=Q(booking__cancellation_datetime__isnull=True, booking__in_waiting_list=False),
                ),
                waiting_list_count=Count(
                    'booking',
                    filter=Q(booking__cancellation_datetime__isnull=True, booking__in_waiting_list=True),
                ),
            )

    @staticmethod
    def annotate_queryset_for_user(qs, excluded_user_external_id):
        """Annotate ``qs`` with user_places_count: active main-list bookings
        held by the given external user id (same Django-version split as
        annotate_queryset)."""
        if django.VERSION < (2, 0):
            return qs.annotate(
                user_places_count=Count(
                    Case(
                        When(
                            booking__cancellation_datetime__isnull=True,
                            booking__in_waiting_list=False,
                            booking__user_external_id=excluded_user_external_id,
                            then='booking',
                        )
                    )
                ),
            )
        else:
            return qs.annotate(
                user_places_count=Count(
                    'booking',
                    filter=Q(
                        booking__cancellation_datetime__isnull=True,
                        booking__in_waiting_list=False,
                        booking__user_external_id=excluded_user_external_id,
                    ),
                ),
            )

    @property
    def booked_places(self):
        """Number of active main-list bookings (uses the annotation when present)."""
        if hasattr(self, 'booked_places_count'):
            return self.booked_places_count
        return self.booking_set.filter(cancellation_datetime__isnull=True, in_waiting_list=False).count()

    @property
    def remaining_places(self):
        """Free places on the main list (never negative)."""
        return max(0, self.places - self.booked_places)

    @property
    def waiting_list(self):
        """Number of active waiting-list bookings (uses the annotation when present)."""
        if hasattr(self, 'waiting_list_count'):
            return self.waiting_list_count
        return self.booking_set.filter(cancellation_datetime__isnull=True, in_waiting_list=True).count()

    @property
    def remaining_waiting_list_places(self):
        """Free places on the waiting list (never negative)."""
        return max(0, self.waiting_list_places - self.waiting_list)

    @property
    def end_datetime(self):
        """End of the event: start + meeting-type or own duration; None when
        no duration is known."""
        if self.meeting_type:
            minutes = self.meeting_type.duration
        else:
            minutes = self.duration
        if minutes is None:
            return None
        return self.start_datetime + datetime.timedelta(minutes=minutes)

    def get_absolute_url(self):
        """Management edit URL for this event."""
        return reverse('chrono-manager-event-edit', kwargs={'pk': self.agenda.id, 'event_pk': self.id})

    def get_absolute_view_url(self):
        """Management read-only URL for this event."""
        return reverse('chrono-manager-event-view', kwargs={'pk': self.agenda.id, 'event_pk': self.id})

    @classmethod
    def import_json(cls, data):
        """Create or update an event from exported JSON.

        Events with a slug are upserted by slug; slug-less events are always
        created. For recurring events, out-of-range recurrences are deleted,
        shared fields are propagated to existing recurrences, and missing
        recurrences are (re)created.
        """
        data['start_datetime'] = make_aware(
            datetime.datetime.strptime(data['start_datetime'], '%Y-%m-%d %H:%M:%S')
        )
        data = clean_import_data(cls, data)
        if data.get('slug'):
            # NOTE(review): this tuple unpacking rebinds the gettext alias '_' locally
            event, _ = cls.objects.update_or_create(slug=data['slug'], defaults=data)
        else:
            event = cls(**data)
            event.save()
        if event.recurrence_rule and event.recurrence_end_date:
            event.refresh_from_db()
            # drop recurrences past the (possibly new) end date
            event.recurrences.filter(start_datetime__gt=event.recurrence_end_date).delete()
            # propagate the shared fields to the surviving recurrences
            update_fields = {
                field: getattr(event, field)
                for field in [
                    'label',
                    'duration',
                    'publication_date',
                    'places',
                    'waiting_list_places',
                    'description',
                    'pricing',
                    'url',
                ]
            }
            event.recurrences.update(**update_fields)
            # recreate only the recurrences that do not exist yet
            excluded_datetimes = [
                make_naive(dt) for dt in event.recurrences.values_list('start_datetime', flat=True)
            ]
            event.create_all_recurrences(excluded_datetimes)

    def export_json(self):
        """Serialize to a JSON-compatible dict (naive local datetime strings)."""
        recurrence_end_date = (
            self.recurrence_end_date.strftime('%Y-%m-%d') if self.recurrence_end_date else None
        )
        return {
            'start_datetime': make_naive(self.start_datetime).strftime('%Y-%m-%d %H:%M:%S'),
            'publication_date': self.publication_date.strftime('%Y-%m-%d') if self.publication_date else None,
            'repeat': self.repeat,
            'recurrence_rule': self.recurrence_rule,
            'recurrence_end_date': recurrence_end_date,
            'places': self.places,
            'waiting_list_places': self.waiting_list_places,
            'label': self.label,
            'slug': self.slug,
            'description': self.description,
            'url': self.url,
            'pricing': self.pricing,
        }

    def duplicate(self, agenda_target=None):
        """Clone this event, returning the saved copy.

        When cloning into another agenda the slug is kept (unique per
        agenda); when cloning within the same agenda a new slug is generated.
        """
        new_event = copy.deepcopy(self)
        new_event.pk = None
        if agenda_target:
            new_event.agenda = agenda_target
        else:
            new_event.slug = None
        new_event.save()

        return new_event

    def cancel(self, cancel_bookings=True):
        """Cancel the event and (optionally) its active bookings.

        When some bookings have a cancel callback URL, cancellation needs
        network calls and is only flagged (``cancellation_scheduled``) for
        asynchronous processing; otherwise everything is cancelled in one
        transaction.
        """
        bookings_to_cancel = self.booking_set.filter(cancellation_datetime__isnull=True).all()
        if cancel_bookings and bookings_to_cancel.exclude(cancel_callback_url='').exists():
            # booking cancellation needs network calls, schedule it for later
            self.cancellation_scheduled = True
            self.save()
        else:
            with transaction.atomic():
                for booking in bookings_to_cancel:
                    booking.cancel()
                self.cancelled = True
                self.save()

    def get_or_create_event_recurrence(self, start_datetime):
        """Return the persisted recurrence of this event at ``start_datetime``,
        creating it on first booking.

        Raises ValueError when the recurrence rule yields zero or several
        occurrences at that datetime.
        """
        events = self.get_recurrences(start_datetime, start_datetime)

        if len(events) == 0:
            raise ValueError('No event recurrence found for specified datetime.')
        elif len(events) > 1:  # should not happen
            raise ValueError('Multiple events found for specified datetime.')

        event = events[0]
        with transaction.atomic():
            try:
                return Event.objects.get(agenda=self.agenda, slug=event.slug)
            except Event.DoesNotExist:
                event.save()
                return event

    def get_recurrences(
        self, min_datetime, max_datetime, excluded_datetimes=None, exceptions=None, slug_separator='--'
    ):
        """Materialize the (unsaved) recurrence Events between two datetimes.

        ``excluded_datetimes`` are naive datetimes to skip (typically already
        persisted recurrences); ``exceptions`` are recurrence exceptions,
        fetched from the agenda when None. Each returned event carries
        ``primary_event=self`` and a slug suffixed with its local start
        datetime ("<slug><separator>YYYY-MM-DD-HHMM").
        """
        recurrences = []
        rrule_set = rruleset()
        # do not generate recurrences for existing events
        rrule_set._exdate = excluded_datetimes or []

        if exceptions is None:
            exceptions = self.agenda.get_recurrence_exceptions(min_datetime, max_datetime)
        for exception in exceptions:
            exception_start = localtime(exception.start_datetime)
            event_start = localtime(self.start_datetime)
            # align the exclusion rule on the event's time of day
            if event_start.time() < exception_start.time():
                exception_start += datetime.timedelta(days=1)
            exception_start = exception_start.replace(
                hour=event_start.hour, minute=event_start.minute, second=0, microsecond=0
            )
            rrule_set.exrule(
                rrule(
                    freq=DAILY,
                    dtstart=make_naive(exception_start),
                    until=make_naive(exception.end_datetime),
                )
            )

        # template copied for every generated occurrence
        event_base = Event(
            agenda=self.agenda,
            primary_event=self,
            slug=self.slug,
            duration=self.duration,
            places=self.places,
            waiting_list_places=self.waiting_list_places,
            publication_date=self.publication_date,
            label=self.label,
            description=self.description,
            pricing=self.pricing,
            url=self.url,
        )

        # don't generate occurrences before the publication date
        if self.publication_date and self.publication_date > min_datetime.date():
            min_datetime = make_aware(datetime.datetime.combine(self.publication_date, datetime.time(0, 0)))
        if self.recurrence_end_date:
            # NOTE(review): mutates self.recurrence_rule in place (not saved here)
            self.recurrence_rule['until'] = datetime.datetime.combine(
                self.recurrence_end_date, datetime.time(0, 0)
            )

        # remove pytz info because dateutil doesn't support DST changes
        min_datetime = make_naive(min_datetime)
        max_datetime = make_naive(max_datetime)
        rrule_set.rrule(rrule(dtstart=make_naive(self.start_datetime), **self.recurrence_rule))

        for start_datetime in rrule_set.between(min_datetime, max_datetime, inc=True):
            event = copy.copy(event_base)
            # add timezone back
            aware_start_datetime = make_aware(start_datetime)
            event.slug = '%s%s%s' % (
                event.slug,
                slug_separator,
                aware_start_datetime.strftime('%Y-%m-%d-%H%M'),
            )
            event.start_datetime = aware_start_datetime.astimezone(utc)
            recurrences.append(event)

        return recurrences

    def get_recurrence_display(self):
        """Localized human-readable summary of the recurrence, e.g.
        "Weekly on Monday at 10 a.m."."""
        repeat = str(self.get_repeat_display())
        time = date_format(localtime(self.start_datetime), 'TIME_FORMAT')
        if self.repeat in ('weekly', '2-weeks'):
            day = date_format(localtime(self.start_datetime), 'l')
            return _('%(every_x_days)s on %(day)s at %(time)s') % {
                'every_x_days': repeat,
                'day': day,
                'time': time,
            }
        else:
            return _('%(every_x_days)s at %(time)s') % {'every_x_days': repeat, 'time': time}

    def get_recurrence_rule(self):
        """Translate the ``repeat`` choice into dateutil rrule kwargs.

        Returns None when the event does not repeat.
        """
        rrule = {}
        if self.repeat == 'daily':
            rrule['freq'] = DAILY
        elif self.repeat == 'weekly':
            rrule['freq'] = WEEKLY
            rrule['byweekday'] = [localtime(self.start_datetime).weekday()]
        elif self.repeat == '2-weeks':
            rrule['freq'] = WEEKLY
            rrule['byweekday'] = [localtime(self.start_datetime).weekday()]
            rrule['interval'] = 2
        elif self.repeat == 'weekdays':
            rrule['freq'] = WEEKLY
            # Monday (0) through Friday (4)
            rrule['byweekday'] = [i for i in range(5)]
        else:
            return None
        return rrule

    def has_recurrences_booked(self, after=None):
        """True when any future recurrence of this event has an active booking."""
        return Booking.objects.filter(
            event__primary_event=self,
            event__start_datetime__gt=after or now(),
            cancellation_datetime__isnull=True,
        ).exists()

    def create_all_recurrences(self, excluded_datetimes=None):
        """Bulk-create all recurrences from now until recurrence_end_date,
        skipping ``excluded_datetimes`` (naive) that already exist."""
        max_datetime = datetime.datetime.combine(self.recurrence_end_date, datetime.time(0, 0))
        recurrences = self.get_recurrences(localtime(now()), make_aware(max_datetime), excluded_datetimes)
        Event.objects.bulk_create(recurrences)

    @property
    def datetime_slug(self):
        """Naive datetime parsed back from the recurrence slug suffix."""
        assert self.primary_event is not None, 'only for event recurrence'

        datetime_part = self.slug.rsplit('--')[-1]
        return datetime.datetime.strptime(datetime_part, '%Y-%m-%d-%H%M')
|
|
|
|
|
|
class BookingColor(models.Model):
    """Color tag attached to bookings; indexes are assigned cyclically."""

    # number of distinct color indexes before wrapping around
    COLOR_COUNT = 8

    label = models.CharField(_('Label'), max_length=250)
    index = models.PositiveSmallIntegerField()

    class Meta:
        unique_together = ('label',)
        ordering = ('pk',)

    def save(self, *args, **kwargs):
        # Auto-assign the next index (modulo COLOR_COUNT) when none was given,
        # continuing from the most recently created color.
        if self.index is None:
            previous = BookingColor.objects.last()
            previous_index = previous.index if previous is not None else -1
            self.index = (previous_index + 1) % self.COLOR_COUNT
        super().save(*args, **kwargs)

    def __str__(self):
        return '%s' % self.label
|
|
|
|
|
|
class Booking(models.Model):
    """A reservation of one place on an event.

    A multi-place reservation is modelled as one primary booking plus
    secondary bookings pointing to it through ``primary_booking``; state
    changes (cancel/accept/suspend) are propagated to the secondaries.
    """

    event = models.ForeignKey(Event, on_delete=models.CASCADE)
    extra_data = JSONField(null=True)
    anonymization_datetime = models.DateTimeField(null=True)
    # non-null marks the booking as cancelled
    cancellation_datetime = models.DateTimeField(null=True)
    reminder_datetime = models.DateTimeField(null=True)
    in_waiting_list = models.BooleanField(default=False)
    creation_datetime = models.DateTimeField(auto_now_add=True)
    # primary booking is used to group multiple bookings together
    primary_booking = models.ForeignKey(
        'self', null=True, on_delete=models.CASCADE, related_name='secondary_booking_set'
    )

    label = models.CharField(max_length=250, blank=True)
    user_display_label = models.CharField(
        verbose_name=_('Label displayed to user'), max_length=250, blank=True
    )
    user_external_id = models.CharField(max_length=250, blank=True)
    user_name = models.CharField(max_length=250, blank=True)
    user_email = models.EmailField(blank=True)
    user_phone_number = models.CharField(max_length=16, blank=True)
    user_was_present = models.NullBooleanField()
    user_absence_reason = models.CharField(max_length=250, blank=True)

    form_url = models.URLField(blank=True)
    backoffice_url = models.URLField(blank=True)
    # remote endpoint notified (POST) when the booking is cancelled, see cancel()
    cancel_callback_url = models.URLField(blank=True)
    color = models.ForeignKey(BookingColor, null=True, on_delete=models.SET_NULL, related_name='bookings')

    def save(self, *args, **kwargs):
        """Persist the booking and refresh the event's occupancy flags.

        The event is re-saved only when its full/almost_full flags actually
        changed, all within one transaction.
        """
        with transaction.atomic():
            super(Booking, self).save(*args, **kwargs)
            initial_values = self.event.full, self.event.almost_full
            self.event.check_full()
            if (self.event.full, self.event.almost_full) != initial_values:
                self.event.save()

    def cancel(self, trigger_callback=False):
        """Cancel this booking and all its secondary bookings.

        When ``trigger_callback`` is set and a callback URL is configured,
        the remote service is POSTed inside the same transaction — an HTTP
        error (raise_for_status) rolls the local cancellation back.
        """
        timestamp = now()
        with transaction.atomic():
            self.secondary_booking_set.update(cancellation_datetime=timestamp)
            self.cancellation_datetime = timestamp
            self.save()
            if self.cancel_callback_url and trigger_callback:
                r = requests_wrapper.post(self.cancel_callback_url, remote_service='auto', timeout=15)
                r.raise_for_status()

    def accept(self):
        """Move this booking (and its secondaries) out of the waiting list."""
        self.in_waiting_list = False
        with transaction.atomic():
            self.secondary_booking_set.update(in_waiting_list=False)
            self.save()

    def suspend(self):
        """Move this booking (and its secondaries) into the waiting list."""
        self.in_waiting_list = True
        with transaction.atomic():
            self.secondary_booking_set.update(in_waiting_list=True)
            self.save()

    @classmethod
    def anonymize_bookings(cls, bookings_queryset):
        """Blank all personal fields of the given bookings and stamp the
        anonymization datetime."""
        bookings_queryset.update(
            label='',
            user_display_label='',
            user_external_id='',
            user_name='',
            extra_data={},
            anonymization_datetime=now(),
        )

    def get_ics(self, request=None):
        """Serialize the booking as an iCalendar string with a single VEVENT.

        Optional fields (description, location, comment, url) are taken from
        the request's query string first, then from extra_data.
        NOTE(review): assumes extra_data is a dict here although the field is
        nullable — confirm callers always populate it.
        """
        ics = vobject.iCalendar()
        ics.add('prodid').value = '-//Entr\'ouvert//NON SGML Publik'
        vevent = vobject.newFromBehavior('vevent')
        # uid built to be unique and stable per (event start, agenda, booking)
        vevent.add('uid').value = '%s-%s-%s' % (
            self.event.start_datetime.isoformat(),
            self.event.agenda.pk,
            self.pk,
        )

        vevent.add('summary').value = self.user_display_label or self.label
        vevent.add('dtstart').value = self.event.start_datetime
        if self.user_name:
            vevent.add('attendee').value = self.user_name
        if self.event.end_datetime:
            vevent.add('dtend').value = self.event.end_datetime

        for field in ('description', 'location', 'comment', 'url'):
            field_value = request and request.GET.get(field) or self.extra_data.get(field)
            if field_value:
                vevent.add(field).value = field_value
        ics.add(vevent)
        return ics.serialize()

    def clone(self, primary_booking=None, save=True):
        """Deep-copy this booking (new pk), optionally attached to a primary
        booking, and save it unless ``save`` is False."""
        new_booking = copy.deepcopy(self)
        new_booking.id = None
        new_booking.primary_booking = primary_booking
        if save:
            new_booking.save()
        return new_booking

    def events_display(self):
        """Label used in events-agenda listings: "<who>, <creation datetime>"."""
        name = self.user_name or self.label or _('Unknown')
        return '%s, %s' % (name, date_format(localtime(self.creation_datetime), 'DATETIME_FORMAT'))

    def meetings_display(self):
        """Label used in meetings-agenda listings, falling back to "booked"."""
        if self.label and self.user_name:
            return '%s - %s' % (self.label, self.user_name)
        elif self.label or self.user_name:
            return self.label or self.user_name
        else:
            return ugettext('booked')
|
|
|
|
|
|
# Simple (begin, end) pair describing one contiguous open interval of a desk.
OpeningHour = collections.namedtuple('OpeningHour', ('begin', 'end'))
|
|
|
|
|
|
class Desk(models.Model):
|
|
agenda = models.ForeignKey(Agenda, on_delete=models.CASCADE)
|
|
label = models.CharField(_('Label'), max_length=150)
|
|
slug = models.SlugField(_('Identifier'), max_length=160)
|
|
|
|
def __str__(self):
|
|
return self.label
|
|
|
|
class Meta:
|
|
ordering = ['label', 'slug']
|
|
unique_together = ['agenda', 'slug']
|
|
|
|
def save(self, *args, **kwargs):
|
|
assert self.agenda.kind != 'virtual', "a desk can't reference a virtual agenda"
|
|
if not self.slug:
|
|
self.slug = generate_slug(self, agenda=self.agenda)
|
|
super(Desk, self).save(*args, **kwargs)
|
|
|
|
@property
|
|
def base_slug(self):
|
|
return slugify(self.label)
|
|
|
|
@classmethod
|
|
def import_json(cls, data):
|
|
timeperiods = data.pop('timeperiods', [])
|
|
exceptions = data.pop('exceptions', [])
|
|
sources = data.pop('exception_sources', [])
|
|
unavailability_calendars = data.pop('unavailability_calendars', [])
|
|
data = clean_import_data(cls, data)
|
|
desk, created = cls.objects.update_or_create(slug=data['slug'], agenda=data['agenda'], defaults=data)
|
|
for timeperiod in timeperiods:
|
|
timeperiod['desk'] = desk
|
|
TimePeriod.import_json(timeperiod)
|
|
for exception in exceptions:
|
|
exception['desk'] = desk
|
|
TimePeriodException.import_json(exception)
|
|
for source in sources:
|
|
source['desk'] = desk
|
|
TimePeriodExceptionSource.import_json(source)
|
|
for unavailability_calendar in unavailability_calendars:
|
|
slug = unavailability_calendar['slug']
|
|
try:
|
|
target_calendar = UnavailabilityCalendar.objects.get(slug=slug)
|
|
except UnavailabilityCalendar.DoesNotExist:
|
|
raise AgendaImportError(_('The unavailability calendar "%s" does not exist.') % slug)
|
|
desk.unavailability_calendars.add(target_calendar)
|
|
|
|
def export_json(self):
|
|
time_period_exceptions = self.timeperiodexception_set.filter(source__settings_slug__isnull=True)
|
|
time_period_exception_sources = self.timeperiodexceptionsource_set.filter(settings_slug__isnull=False)
|
|
return {
|
|
'label': self.label,
|
|
'slug': self.slug,
|
|
'timeperiods': [time_period.export_json() for time_period in self.timeperiod_set.filter()],
|
|
'exceptions': [exception.export_json() for exception in time_period_exceptions],
|
|
'exception_sources': [source.export_json() for source in time_period_exception_sources],
|
|
'unavailability_calendars': [{'slug': x.slug} for x in self.unavailability_calendars.all()],
|
|
}
|
|
|
|
def duplicate(self, label=None, agenda_target=None):
|
|
# clone current desk
|
|
new_desk = copy.deepcopy(self)
|
|
new_desk.pk = None
|
|
# set label
|
|
new_desk.label = label or new_desk.label
|
|
# reset slug
|
|
new_desk.slug = None
|
|
# set agenda
|
|
if agenda_target:
|
|
new_desk.agenda = agenda_target
|
|
# store new desk
|
|
new_desk.save()
|
|
|
|
# clone related objects
|
|
for time_period in self.timeperiod_set.all():
|
|
time_period.duplicate(desk_target=new_desk)
|
|
for time_period_exception in self.timeperiodexception_set.filter(source__isnull=True):
|
|
time_period_exception.duplicate(desk_target=new_desk)
|
|
for time_period_exception_source in self.timeperiodexceptionsource_set.all():
|
|
time_period_exception_source.duplicate(desk_target=new_desk)
|
|
new_desk.unavailability_calendars.set(self.unavailability_calendars.all())
|
|
|
|
return new_desk
|
|
|
|
def get_exceptions_within_two_weeks(self):
|
|
# prefetched_exceptions contains desks exceptions + unavailability_calendars exceptions
|
|
# default ordering: start_datetime
|
|
in_two_weeks = make_aware(datetime.datetime.today() + datetime.timedelta(days=14))
|
|
exceptions = []
|
|
for exception in self.prefetched_exceptions:
|
|
if exception.end_datetime < now():
|
|
# exception ends in the past, skip it
|
|
continue
|
|
if exception.end_datetime <= in_two_weeks:
|
|
# ends in less than 2 weeks
|
|
exceptions.append(exception)
|
|
elif exception.start_datetime < now():
|
|
# has already started
|
|
exceptions.append(exception)
|
|
if exceptions:
|
|
return exceptions
|
|
# if none found within the 2 coming weeks, return the next one
|
|
for exception in self.prefetched_exceptions:
|
|
if exception.start_datetime < now():
|
|
# exception starts in the past, skip it
|
|
continue
|
|
# returns the first exception found
|
|
return [exception]
|
|
return []
|
|
|
|
def are_all_exceptions_displayed(self):
|
|
in_two_weeks = self.get_exceptions_within_two_weeks()
|
|
return len(self.prefetched_exceptions) == len(in_two_weeks)
|
|
|
|
def get_opening_hours(self, date):
|
|
openslots = IntervalSet()
|
|
for timeperiod in self.timeperiod_set.all():
|
|
# timeperiod_set.all() are prefetched, do not filter in queryset
|
|
if timeperiod.weekday != date.weekday():
|
|
continue
|
|
start_datetime = make_aware(datetime.datetime.combine(date, timeperiod.start_time))
|
|
end_datetime = make_aware(datetime.datetime.combine(date, timeperiod.end_time))
|
|
openslots.add(start_datetime, end_datetime)
|
|
|
|
aware_date = make_aware(datetime.datetime(date.year, date.month, date.day))
|
|
exceptions = IntervalSet()
|
|
aware_next_date = aware_date + datetime.timedelta(days=1)
|
|
for exception in self.prefetched_exceptions:
|
|
if exception.end_datetime < aware_date:
|
|
continue
|
|
if exception.start_datetime > aware_next_date:
|
|
continue
|
|
exceptions.add(exception.start_datetime, exception.end_datetime)
|
|
|
|
return [OpeningHour(*time_range) for time_range in (openslots - exceptions)]
|
|
|
|
def import_timeperiod_exceptions_from_settings(self, enable=False, spool=True):
    """Synchronize this desk's exception sources with settings.EXCEPTIONS_SOURCES.

    Missing sources are created (disabled), labels are refreshed, enabled
    sources are updated, and sources no longer declared in settings are
    deleted afterwards.
    """
    sync_started_at = now()
    for slug, source_info in settings.EXCEPTIONS_SOURCES.items():
        try:
            source = TimePeriodExceptionSource.objects.get(desk=self, settings_slug=slug)
        except TimePeriodExceptionSource.DoesNotExist:
            source = TimePeriodExceptionSource.objects.create(
                desk=self, settings_slug=slug, enabled=False
            )
        source.settings_label = _(source_info['label'])
        source.save()
        # an already-enabled source is refreshed even without enable=True
        if enable or source.enabled:
            source.enable(spool=spool)
    # save() bumped last_update (auto_now); older rows were not in settings anymore
    TimePeriodExceptionSource.objects.filter(
        desk=self, settings_slug__isnull=False, last_update__lt=sync_started_at
    ).delete()
|
|
|
|
|
|
class Resource(models.Model):
    """A labelled resource identified by a unique slug."""

    slug = models.SlugField(_('Identifier'), max_length=160, unique=True)
    label = models.CharField(_('Label'), max_length=150)
    description = models.TextField(_('Description'), blank=True, help_text=_('Optional description.'))

    def __str__(self):
        return self.label

    class Meta:
        ordering = ['label']

    def save(self, *args, **kwargs):
        # auto-generate a unique slug from the label on first save
        if not self.slug:
            self.slug = generate_slug(self)
        super().save(*args, **kwargs)

    @property
    def base_slug(self):
        # base value used by generate_slug() to derive a unique slug
        return slugify(self.label)
|
|
|
|
|
|
class Category(models.Model):
    """A labelled category identified by a unique slug."""

    slug = models.SlugField(_('Identifier'), max_length=160, unique=True)
    label = models.CharField(_('Label'), max_length=150)

    def __str__(self):
        return self.label

    class Meta:
        ordering = ['label']

    def save(self, *args, **kwargs):
        # auto-generate a unique slug from the label on first save
        if not self.slug:
            self.slug = generate_slug(self)
        super().save(*args, **kwargs)

    @property
    def base_slug(self):
        # base value used by generate_slug() to derive a unique slug
        return slugify(self.label)
|
|
|
|
|
|
def ics_directory_path(instance, filename):
    """Build a unique upload path of the form ``ics/<uuid4>/<filename>``.

    Used as the ``upload_to`` callable of ``TimePeriodExceptionSource.ics_file``;
    ``instance`` is required by Django's upload_to API but is unused here.
    """
    unique_directory = uuid.uuid4()
    return 'ics/%s/%s' % (unique_directory, filename)
|
|
|
|
|
|
class TimePeriodExceptionSource(models.Model):
    """Origin of a batch of TimePeriodException rows for a desk.

    A source is either an ICS calendar (uploaded ``ics_file`` or remote
    ``ics_url``) or a computed calendar declared in
    ``settings.EXCEPTIONS_SOURCES`` (``settings_slug``/``settings_label``).
    """

    desk = models.ForeignKey(Desk, on_delete=models.CASCADE)
    # original ICS file name, kept for display and duplication
    ics_filename = models.CharField(null=True, max_length=256)
    ics_file = models.FileField(upload_to=ics_directory_path, blank=True, null=True)
    ics_url = models.URLField(null=True, max_length=500)
    # slug/label of a source declared in settings.EXCEPTIONS_SOURCES
    settings_slug = models.CharField(null=True, max_length=150)
    settings_label = models.CharField(null=True, max_length=150)
    # auto_now: bumped on every save(); used to detect sources removed from settings
    last_update = models.DateTimeField(auto_now=True, null=True)
    enabled = models.BooleanField(default=True)

    class Meta:
        unique_together = ['desk', 'settings_slug']

    def __str__(self):
        if self.ics_filename is not None:
            return self.ics_filename
        if self.settings_label is not None:
            return ugettext(self.settings_label)
        return self.ics_url

    def duplicate(self, desk_target=None):
        """Deep-copy this source (and its exceptions) onto desk_target, or its own desk."""
        # clone current source
        new_source = copy.deepcopy(self)
        new_source.pk = None
        # set desk
        new_source.desk = desk_target or self.desk
        # set ics_file: re-save the content so the copy gets its own storage path
        if self.ics_file:
            with open(self.ics_file.path) as ics_file:
                new_source.ics_file.save(self.ics_filename, ics_file, save=False)
        # store new source
        new_source.save()
        # clone related objects
        for time_period_exception in self.timeperiodexception_set.all():
            time_period_exception.duplicate(desk_target=desk_target, source_target=new_source)

        return new_source

    def enable(self, spool=True):
        """Mark the source enabled and refresh its exceptions.

        Under uwsgi (with spool=True) the refresh is deferred to a spooler
        task after commit; otherwise it runs synchronously.
        """
        self.enabled = True
        self.save()

        if spool and 'uwsgi' in sys.modules:
            from chrono.utils.spooler import refresh_exceptions_from_settings

            tenant = getattr(connection, 'tenant', None)
            transaction.on_commit(
                lambda: refresh_exceptions_from_settings.spool(
                    source_id=str(self.pk), domain=getattr(tenant, 'domain_url', None)
                )
            )
            return

        self.refresh_from_settings()

    def refresh_from_settings(self):
        """Rebuild holiday exceptions from the settings-declared calendar class.

        Creates one full-day exception per holiday for the current year and
        the two following ones. No-op when the source is disabled or its slug
        is no longer declared in settings.
        """
        if not self.enabled:
            return
        source_info = settings.EXCEPTIONS_SOURCES.get(self.settings_slug)
        if not source_info:
            return
        source_class = import_string(source_info['class'])
        calendar = source_class()
        this_year = now().year
        days = [day for year in range(this_year, this_year + 3) for day in calendar.holidays(year)]
        with transaction.atomic():
            # replace all exceptions generated by this source, atomically
            self.timeperiodexception_set.all().delete()
            for day, label in days:
                start_datetime = make_aware(datetime.datetime.combine(day, datetime.datetime.min.time()))
                end_datetime = start_datetime + datetime.timedelta(days=1)
                TimePeriodException.objects.create(
                    desk=self.desk,
                    source=self,
                    label=_(label),
                    start_datetime=start_datetime,
                    end_datetime=end_datetime,
                )

    def disable(self):
        """Disable the source and drop all exceptions it generated."""
        self.timeperiodexception_set.all().delete()
        self.enabled = False
        self.save()

    def _check_ics_content(self):
        """Fetch and parse the ICS data; return the parsed vobject calendar.

        Raises ICSError on network failure, invalid format, an empty
        calendar, or an event missing a start date.
        """
        if self.ics_url:
            try:
                response = requests.get(self.ics_url, proxies=settings.REQUESTS_PROXIES)
                response.raise_for_status()
            except requests.HTTPError as e:
                raise ICSError(
                    _('Failed to retrieve remote calendar (%(url)s, HTTP error %(status_code)s).')
                    % {'url': self.ics_url, 'status_code': e.response.status_code}
                )
            except requests.RequestException as e:
                raise ICSError(
                    _('Failed to retrieve remote calendar (%(url)s, %(exception)s).')
                    % {'url': self.ics_url, 'exception': e}
                )
            try:
                # override response encoding received in HTTP headers as it may
                # often be missing and defaults to iso-8859-15.
                response.content.decode('utf-8')
                response.encoding = 'utf-8'
            except UnicodeDecodeError:
                pass
            data = response.text
        else:
            data = force_text(self.ics_file.read())

        try:
            parsed = vobject.readOne(data)
        except vobject.base.ParseError:
            raise ICSError(_('File format is invalid.'))

        if not parsed.contents.get('vevent'):
            raise ICSError(_('The file doesn\'t contain any events.'))

        # reject calendars containing an event without DTSTART up front
        for vevent in parsed.contents.get('vevent', []):
            summary = self._get_summary_from_vevent(vevent)
            try:
                vevent.dtstart.value
            except AttributeError:
                raise ICSError(_('Event "%s" has no start date.') % summary)

        return parsed

    def _get_summary_from_vevent(self, vevent):
        # use the event summary as exception label, with a generic fallback
        if 'summary' in vevent.contents:
            return force_text(vevent.contents['summary'][0].value)
        return _('Exception')

    def refresh_timeperiod_exceptions(self, data=None):
        """Refresh exceptions from ICS, via the uwsgi spooler when available."""
        if 'uwsgi' in sys.modules:
            from chrono.utils.spooler import refresh_exception_source

            tenant = getattr(connection, 'tenant', None)
            transaction.on_commit(
                lambda: refresh_exception_source.spool(
                    source_id=str(self.pk), domain=getattr(tenant, 'domain_url', None)
                )
            )
            return

        self.refresh_timeperiod_exceptions_from_ics(data=data)

    def refresh_timeperiod_exceptions_from_ics(self, data=None, recurring_days=600):
        """Recreate this source's exceptions from ICS content.

        :param data: an already-parsed vobject calendar; fetched and
            validated via _check_ics_content() when None.
        :param recurring_days: horizon, in days from now, used to expand
            recurring events into individual exceptions.
        """
        if data is None:
            parsed = self._check_ics_content()
        else:
            parsed = data

        with transaction.atomic():
            # delete old exceptions related to this source
            self.timeperiodexception_set.all().delete()
            # create new exceptions
            update_datetime = now()
            for vevent in parsed.contents.get('vevent', []):
                summary = self._get_summary_from_vevent(vevent)
                try:
                    start_dt = vevent.dtstart.value
                    # DATE values (whole-day events) become midnight datetimes
                    if not isinstance(start_dt, datetime.datetime):
                        start_dt = datetime.datetime.combine(start_dt, datetime.datetime.min.time())
                    if not is_aware(start_dt):
                        start_dt = make_aware(start_dt)
                except AttributeError:
                    raise ICSError(_('Event "%s" has no start date.') % summary)
                try:
                    end_dt = vevent.dtend.value
                    if not isinstance(end_dt, datetime.datetime):
                        end_dt = datetime.datetime.combine(end_dt, datetime.datetime.min.time())
                    if not is_aware(end_dt):
                        end_dt = make_aware(end_dt)
                    duration = end_dt - start_dt
                except AttributeError:
                    try:
                        # no DTEND: fall back to the DURATION property
                        duration = vevent.duration.value
                        end_dt = start_dt + duration
                    except AttributeError:
                        # events without end date are considered as ending the same day
                        end_dt = make_aware(datetime.datetime.combine(start_dt, datetime.datetime.max.time()))
                        duration = end_dt - start_dt

                event = {
                    'start_datetime': start_dt,
                    'end_datetime': end_dt,
                    'label': summary,
                    'desk_id': self.desk_id,
                    'source': self,
                    'recurrence_id': 0,
                }

                if not vevent.rruleset:
                    # classical event
                    TimePeriodException.objects.create(**event)
                elif vevent.rruleset.count():
                    # recurring event until recurring_days in the future
                    from_dt = start_dt
                    until_dt = update_datetime + datetime.timedelta(days=recurring_days)
                    # the rruleset may yield naive datetimes; compare like with like
                    if not is_aware(vevent.rruleset[0]):
                        from_dt = make_naive(from_dt)
                        until_dt = make_naive(until_dt)
                    i = -1
                    for i, start_dt in enumerate(vevent.rruleset.between(from_dt, until_dt, inc=True)):
                        # recompute start_dt and end_dt from occurrences and duration
                        if not is_aware(start_dt):
                            start_dt = make_aware(start_dt)
                        end_dt = start_dt + duration
                        event['recurrence_id'] = i
                        event['start_datetime'] = start_dt
                        event['end_datetime'] = end_dt
                        # skip occurrences that ended before this refresh
                        if end_dt >= update_datetime:
                            TimePeriodException.objects.create(**event)

    @classmethod
    def import_json(cls, data):
        """Create or update a source from exported data, refreshing it if enabled."""
        data = clean_import_data(cls, data)
        # note: `_` locally shadows the translation helper; harmless, it is unused below
        source, _ = cls.objects.update_or_create(**data)
        if source.enabled:
            source.enable()

    def export_json(self):
        '''Export only sources from settings.'''
        return {
            'settings_slug': self.settings_slug,
            'settings_label': self.settings_label,
            'enabled': self.enabled,
        }
|
|
|
|
|
|
class UnavailabilityCalendar(models.Model):
    """Named set of unavailability exceptions shared by several desks."""

    label = models.CharField(_('Label'), max_length=150)
    slug = models.SlugField(_('Identifier'), max_length=160, unique=True)
    desks = models.ManyToManyField(Desk, related_name='unavailability_calendars')
    # role allowed to manage the calendar (see can_be_managed)
    edit_role = models.ForeignKey(
        Group,
        blank=True,
        null=True,
        default=None,
        related_name='+',
        verbose_name=_('Edit Role'),
        on_delete=models.SET_NULL,
    )
    # role allowed to view the calendar (see can_be_viewed)
    view_role = models.ForeignKey(
        Group,
        blank=True,
        null=True,
        default=None,
        related_name='+',
        verbose_name=_('View Role'),
        on_delete=models.SET_NULL,
    )

    class Meta:
        ordering = ['label']

    def __str__(self):
        return self.label

    @property
    def base_slug(self):
        # base value used by generate_slug() to derive a unique slug
        return slugify(self.label)

    def save(self, *args, **kwargs):
        # auto-generate a unique slug from the label on first save
        if not self.slug:
            self.slug = generate_slug(self)
        super(UnavailabilityCalendar, self).save(*args, **kwargs)

    def can_be_managed(self, user):
        """Staff members and members of the edit role can manage the calendar."""
        if user.is_staff:
            return True
        group_ids = [x.id for x in user.groups.all()]
        return bool(self.edit_role_id in group_ids)

    def can_be_viewed(self, user):
        """Anyone who can manage, plus members of the view role, can view it."""
        if self.can_be_managed(user):
            return True
        group_ids = [x.id for x in user.groups.all()]
        return bool(self.view_role_id in group_ids)

    def get_absolute_url(self):
        return reverse('chrono-manager-unavailability-calendar-view', kwargs={'pk': self.id})

    def export_json(self):
        """Serialize the calendar with its role names and its exceptions."""
        unavailability_calendar = {
            'label': self.label,
            'slug': self.slug,
            'permissions': {
                'view': self.view_role.name if self.view_role else None,
                'edit': self.edit_role.name if self.edit_role else None,
            },
            'exceptions': [exception.export_json() for exception in self.timeperiodexception_set.all()],
        }
        return unavailability_calendar

    @classmethod
    def import_json(cls, data, overwrite=False):
        """Create or update a calendar from export_json() data.

        With overwrite=True the existing exceptions are wiped before the
        imported ones are created. Returns True when the calendar was newly
        created. Raises Group.DoesNotExist for an unknown role name.
        """
        data = data.copy()
        permissions = data.pop('permissions', {})
        exceptions = data.pop('exceptions', [])
        for permission in ('view', 'edit'):
            if permissions.get(permission):
                data[permission + '_role'] = Group.objects.get(name=permissions[permission])
        data = clean_import_data(cls, data)
        unavailability_calendar, created = cls.objects.update_or_create(slug=data['slug'], defaults=data)
        if overwrite:
            TimePeriodException.objects.filter(unavailability_calendar=unavailability_calendar).delete()
        for exception in exceptions:
            exception['unavailability_calendar'] = unavailability_calendar
            TimePeriodException.import_json(exception)

        return created
|
|
|
|
|
|
class TimePeriodException(models.Model):
    """A closed time range during which a desk is unavailable.

    An exception may belong to a desk, to a shared unavailability calendar,
    or to an ICS/settings source; in the latter two cases it is flagged
    read-only (see the read_only property).
    """

    desk = models.ForeignKey(Desk, on_delete=models.CASCADE, null=True)
    unavailability_calendar = models.ForeignKey(UnavailabilityCalendar, on_delete=models.CASCADE, null=True)
    source = models.ForeignKey(TimePeriodExceptionSource, on_delete=models.CASCADE, null=True)
    label = models.CharField(_('Optional Label'), max_length=150, blank=True, null=True)
    start_datetime = models.DateTimeField(_('Exception start time'))
    end_datetime = models.DateTimeField(_('Exception end time'))
    update_datetime = models.DateTimeField(auto_now=True)
    # occurrence index for exceptions expanded from a recurring ICS event
    recurrence_id = models.PositiveIntegerField(_('Recurrence ID'), default=0)

    @property
    def read_only(self):
        # exceptions generated from a source or attached to a shared
        # unavailability calendar are flagged read-only
        if self.source_id:
            return True
        if self.unavailability_calendar_id:
            return True
        return False

    class Meta:
        ordering = ['start_datetime']

    def __str__(self):
        if is_midnight(self.start_datetime) and is_midnight(self.end_datetime):
            # if both dates are at midnight don't include the time part
            if self.end_datetime == self.start_datetime + datetime.timedelta(days=1):
                # a single day
                exc_repr = u'%s' % date_format(localtime(self.start_datetime), 'SHORT_DATE_FORMAT')
            else:
                exc_repr = u'%s → %s' % (
                    date_format(localtime(self.start_datetime), 'SHORT_DATE_FORMAT'),
                    date_format(localtime(self.end_datetime), 'SHORT_DATE_FORMAT'),
                )
        else:
            if localtime(self.start_datetime).date() == localtime(self.end_datetime).date():
                # same day: repeat only the time part for the end
                exc_repr = u'%s → %s' % (
                    date_format(localtime(self.start_datetime), 'SHORT_DATETIME_FORMAT'),
                    date_format(localtime(self.end_datetime), 'TIME_FORMAT'),
                )
            else:
                exc_repr = u'%s → %s' % (
                    date_format(localtime(self.start_datetime), 'SHORT_DATETIME_FORMAT'),
                    date_format(localtime(self.end_datetime), 'SHORT_DATETIME_FORMAT'),
                )

        if self.label:
            exc_repr = u'%s (%s)' % (self.label, exc_repr)

        return exc_repr

    def has_booking_within_time_slot(self, target_desk=None):
        """Tell whether an active (non-cancelled) booking overlaps this exception.

        Desks considered, in order of precedence: the exception's own desk;
        the desks of its unavailability calendar (unless target_desk is
        given); target_desk alone. Returns False for orphan or incomplete
        exceptions.
        """
        if not (self.start_datetime and self.end_datetime):
            # incomplete time period, can't tell
            return False

        query = Event.objects
        if self.desk:
            query = query.filter(desk=self.desk)
        elif self.unavailability_calendar and not target_desk:
            query = query.filter(desk__in=self.unavailability_calendar.desks.all())
        elif target_desk:
            query = query.filter(desk=target_desk)
        else:
            # orphan exception
            return False

        for event in query.filter(booking__isnull=False, booking__cancellation_datetime__isnull=True):
            # event starting inside the exception slot
            if self.start_datetime <= event.start_datetime < self.end_datetime:
                return True
            if event.meeting_type:
                # meeting started before the exception but still running when it begins
                if (
                    event.start_datetime
                    <= self.start_datetime
                    < event.start_datetime + datetime.timedelta(minutes=event.meeting_type.duration)
                ):
                    return True
        return False

    @classmethod
    def import_json(cls, data):
        """Create or update an exception from export_json() data."""

        def import_datetime(s):
            '''Import datetime as a naive ISO8601 serialization'''
            return make_aware(datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S'))

        for k, v in data.items():
            if k.endswith('_datetime'):
                data[k] = import_datetime(v)
        data = clean_import_data(cls, data)
        query_data = data.copy()
        # update_datetime only belongs in defaults, not in the lookup
        query_data.pop('update_datetime')
        try:
            cls.objects.update_or_create(defaults=data, **query_data)
        except cls.MultipleObjectsReturned:
            # identical exceptions already exist; just refresh their timestamp
            cls.objects.filter(**query_data).update(update_datetime=data['update_datetime'])

    def export_json(self):
        """Serialize the exception with naive local datetimes."""

        def export_datetime(dt):
            '''Export datetime as a naive ISO8601 serialization'''
            return make_naive(dt).strftime('%Y-%m-%d %H:%M:%S')

        return {
            'label': self.label,
            'start_datetime': export_datetime(self.start_datetime),
            'end_datetime': export_datetime(self.end_datetime),
            'recurrence_id': self.recurrence_id,
            'update_datetime': export_datetime(self.update_datetime),
        }

    def duplicate(self, desk_target=None, source_target=None):
        """Clone the exception, optionally reattaching it to another desk/source."""
        # clone current exception
        new_exception = copy.deepcopy(self)
        new_exception.pk = None
        # set desk
        new_exception.desk = desk_target or self.desk
        # set source
        new_exception.source = source_target or self.source
        # store new exception
        new_exception.save()

        return new_exception

    def as_interval(self):
        '''Simplify insertion into IntervalSet'''
        return Interval(self.start_datetime, self.end_datetime)
|
|
|
|
|
|
class EventCancellationReport(models.Model):
    """Record of an event cancellation: impacted bookings and per-booking errors."""

    event = models.ForeignKey(Event, related_name='cancellation_reports', on_delete=models.CASCADE)
    timestamp = models.DateTimeField(auto_now_add=True)
    # presumably flipped once the report has been reviewed by a manager — confirm with callers
    seen = models.BooleanField(default=False)
    bookings = models.ManyToManyField(Booking)
    booking_errors = JSONField()

    def __str__(self):
        return '%s - %s' % (self.timestamp.strftime('%Y-%m-%d %H:%M:%S'), self.event)

    class Meta:
        ordering = ['-timestamp']
|
|
|
|
|
|
class NotificationType:
    """Read-only view over one notification choice field of the settings object.

    ``name`` is the settings field holding the recipient choice; the
    companion ``<name>_emails`` field holds manually entered addresses.
    """

    def __init__(self, name, related_field, settings):
        self.name = name
        self.related_field = related_field
        self.settings = settings

    def _selected_choice(self):
        # current value of the underlying choice field
        return getattr(self.settings, self.name)

    def _manual_emails(self):
        # addresses stored in the companion "<name>_emails" field
        return getattr(self.settings, self.name + '_emails')

    @property
    def enabled(self):
        """True when the notification is configured to actually reach someone."""
        selected = self._selected_choice()
        if not selected:
            return False
        if selected == self.settings.EMAIL_FIELD:
            # manual-addresses mode: enabled only when at least one is set
            return bool(self._manual_emails())
        return True

    def get_recipients(self):
        """Return the list of recipient email addresses (possibly empty)."""
        selected = self._selected_choice()
        if not selected:
            return []
        if selected == self.settings.EMAIL_FIELD:
            return self._manual_emails()
        role = self.settings.get_role_from_choice(selected)
        if not role or not hasattr(role, 'role'):
            return []
        emails = role.role.emails
        if role.role.emails_to_members:
            emails.extend(role.user_set.values_list('email', flat=True))
        return emails

    @property
    def display_value(self):
        """Human-readable description of the current configuration."""
        selected = self._selected_choice()
        if not selected:
            return ''
        if selected == self.settings.EMAIL_FIELD:
            return ', '.join(self._manual_emails())
        role = self.settings.get_role_from_choice(selected) or _('undefined')
        display_name = getattr(self.settings, 'get_%s_display' % self.name)()
        return '%s (%s)' % (display_name, role)

    @property
    def label(self):
        """Verbose name of the underlying settings field."""
        return self.settings._meta.get_field(self.name).verbose_name
|
|
|
|
|
|
class AgendaNotificationsSettings(models.Model):
    """Per-agenda configuration of manager notifications.

    Each *_event field stores where notifications go (edit role, view role,
    or manual addresses held by the companion *_event_emails field).
    """

    # special choice value: recipients are typed in manually
    EMAIL_FIELD = 'use-email-field'
    VIEW_ROLE = 'view-role'
    EDIT_ROLE = 'edit-role'

    CHOICES = [
        (EDIT_ROLE, _('Edit Role')),
        (VIEW_ROLE, _('View Role')),
        (EMAIL_FIELD, _('Specify email addresses manually')),
    ]

    agenda = models.OneToOneField(Agenda, on_delete=models.CASCADE, related_name='notifications_settings')

    almost_full_event = models.CharField(
        max_length=16, blank=True, choices=CHOICES, verbose_name=_('Almost full event (90%)')
    )
    almost_full_event_emails = ArrayField(models.EmailField(), blank=True, null=True)

    full_event = models.CharField(max_length=16, blank=True, choices=CHOICES, verbose_name=_('Full event'))
    full_event_emails = ArrayField(models.EmailField(), blank=True, null=True)

    cancelled_event = models.CharField(
        max_length=16, blank=True, choices=CHOICES, verbose_name=_('Cancelled event')
    )
    cancelled_event_emails = ArrayField(models.EmailField(), blank=True, null=True)

    @classmethod
    def get_email_field_names(cls):
        # all ArrayField columns, i.e. the *_emails fields
        return [field.name for field in cls._meta.get_fields() if isinstance(field, ArrayField)]

    @staticmethod
    def get_role_field_names():
        # the choice fields that may point to a role
        return ['almost_full_event', 'full_event', 'cancelled_event']

    def get_notification_types(self):
        """Yield a NotificationType for each enabled notification field."""
        for field in self.get_role_field_names():
            notification_type = NotificationType(
                name=field, related_field=field.replace('_event', ''), settings=self
            )
            if notification_type.enabled:
                yield notification_type

    def get_role_from_choice(self, choice):
        # map a choice value to the agenda's matching role (None otherwise)
        if choice == self.EDIT_ROLE:
            return self.agenda.edit_role
        elif choice == self.VIEW_ROLE:
            return self.agenda.view_role

    @classmethod
    def import_json(cls, data):
        """Create or update the settings row attached to the imported agenda."""
        data = clean_import_data(cls, data)
        agenda = data.pop('agenda')
        cls.objects.update_or_create(agenda=agenda, defaults=data)

    def export_json(self):
        """Serialize all notification choice and email fields."""
        return {
            'almost_full_event': self.almost_full_event,
            'almost_full_event_emails': self.almost_full_event_emails,
            'full_event': self.full_event,
            'full_event_emails': self.full_event_emails,
            'cancelled_event': self.cancelled_event,
            'cancelled_event_emails': self.cancelled_event_emails,
        }
|
|
|
|
|
|
class AgendaReminderSettings(models.Model):
    """Per-agenda configuration of booking reminders (email and/or SMS)."""

    ONE_DAY_BEFORE = 1
    TWO_DAYS_BEFORE = 2
    THREE_DAYS_BEFORE = 3

    CHOICES = [
        (None, _('Never')),
        (ONE_DAY_BEFORE, _('One day before')),
        (TWO_DAYS_BEFORE, _('Two days before')),
        (THREE_DAYS_BEFORE, _('Three days before')),
    ]

    agenda = models.OneToOneField(Agenda, on_delete=models.CASCADE, related_name='reminder_settings')
    # None means reminders are disabled (the 'Never' choice)
    days = models.IntegerField(null=True, blank=True, choices=CHOICES, verbose_name=_('Send reminder'))
    send_email = models.BooleanField(default=False, verbose_name=_('Notify by email'))
    email_extra_info = models.TextField(
        blank=True,
        # typo fix: "incude" -> "include" (translation catalogs must be updated)
        verbose_name=_('Additional text to include in emails'),
        help_text=_('Basic information such as event name, time and date are already included.'),
    )
    send_sms = models.BooleanField(default=False, verbose_name=_('Notify by SMS'))
    sms_extra_info = models.TextField(
        blank=True,
        # typo fix: "incude" -> "include" (translation catalogs must be updated)
        verbose_name=_('Additional text to include in SMS'),
        help_text=_('Basic information such as event name, time and date are already included.'),
    )

    def display_info(self):
        """Return a human-readable summary of the reminder configuration.

        NOTE(review): assumes ``self.days`` is set; the plural form is chosen
        from its value — confirm callers only use this when reminders are on.
        """
        message = ungettext(
            'Users will be reminded of their booking %(by_email_or_sms)s, one day in advance.',
            'Users will be reminded of their booking %(by_email_or_sms)s, %(days)s days in advance.',
            self.days,
        )

        if self.send_sms and self.send_email:
            by = _('both by email and by SMS')
        elif self.send_sms:
            by = _('by SMS')
        elif self.send_email:
            by = _('by email')
        else:
            # bug fix: `by` was previously left unbound when neither channel
            # was enabled, raising UnboundLocalError
            by = ''

        return message % {'days': self.days, 'by_email_or_sms': by}

    @classmethod
    def import_json(cls, data):
        """Create or update the settings row attached to the imported agenda."""
        data = clean_import_data(cls, data)
        agenda = data.pop('agenda')
        cls.objects.update_or_create(agenda=agenda, defaults=data)

    def export_json(self):
        """Serialize the reminder configuration."""
        return {
            'days': self.days,
            'send_email': self.send_email,
            'email_extra_info': self.email_extra_info,
            'send_sms': self.send_sms,
            'sms_extra_info': self.sms_extra_info,
        }
|
|
|
|
|
|
class AbsenceReasonGroup(models.Model):
    """A labelled group of absence reasons, identified by a unique slug."""

    slug = models.SlugField(_('Identifier'), max_length=160, unique=True)
    label = models.CharField(_('Label'), max_length=150)

    class Meta:
        ordering = ['label']

    def __str__(self):
        return self.label

    def save(self, *args, **kwargs):
        # auto-generate a unique slug from the label on first save
        if not self.slug:
            self.slug = generate_slug(self)
        super().save(*args, **kwargs)

    @property
    def base_slug(self):
        # base value used by generate_slug() to derive a unique slug
        return slugify(self.label)
|
|
|
|
|
|
class AbsenceReason(models.Model):
|
|
group = models.ForeignKey(AbsenceReasonGroup, on_delete=models.CASCADE, related_name='absence_reasons')
|
|
label = models.CharField(_('Label'), max_length=150)
|
|
|
|
class Meta:
|
|
ordering = ['label']
|
|
|
|
def __str__(self):
|
|
return self.label
|