2017-07-07 08:44:24 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# combo-plugin-gnm - Combo GNM plugin
|
|
|
|
# Copyright (C) 2017 Entr'ouvert
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify it
|
|
|
|
# under the terms of the GNU Affero General Public License as published
|
|
|
|
# by the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2018-03-02 17:54:59 +01:00
|
|
|
from collections import OrderedDict
|
2017-07-07 08:44:24 +02:00
|
|
|
import datetime
|
2018-08-15 21:54:28 +02:00
|
|
|
import json
|
2018-03-02 17:54:59 +01:00
|
|
|
import operator
|
2017-09-24 13:01:18 +02:00
|
|
|
import random
|
2017-07-07 08:44:24 +02:00
|
|
|
import re
|
2021-03-16 09:56:35 +01:00
|
|
|
import urllib.parse
|
2017-07-07 08:44:24 +02:00
|
|
|
|
2019-05-26 11:03:29 +02:00
|
|
|
from pyproj import Geod
|
2017-09-25 18:09:09 +02:00
|
|
|
from requests import RequestException
|
|
|
|
|
2017-07-07 08:44:24 +02:00
|
|
|
from django import template
|
2017-09-13 15:35:26 +02:00
|
|
|
from django.conf import settings
|
2018-08-15 21:54:28 +02:00
|
|
|
from django.core.serializers.json import DjangoJSONEncoder
|
2018-10-04 15:38:21 +02:00
|
|
|
from django.core import signing
|
2019-10-14 20:31:50 +02:00
|
|
|
from django.utils import six
|
2020-02-26 10:40:19 +01:00
|
|
|
from django.utils.dateparse import parse_date, parse_datetime
|
2018-03-31 20:15:07 +02:00
|
|
|
from django.utils.http import quote
|
2018-08-15 21:54:28 +02:00
|
|
|
from django.utils.html import format_html
|
2017-09-24 13:01:18 +02:00
|
|
|
from django.utils.text import slugify
|
2019-10-27 17:18:39 +01:00
|
|
|
from django.utils.timezone import now, is_naive, make_aware
|
2018-03-02 17:54:59 +01:00
|
|
|
from django.utils.safestring import mark_safe
|
2017-07-07 08:44:24 +02:00
|
|
|
|
2019-05-26 11:03:29 +02:00
|
|
|
from combo.apps.dashboard.models import DashboardCell, Tile
|
2018-06-22 13:21:36 +02:00
|
|
|
from combo.apps.maps.models import Map, MapLayer
|
2018-09-16 17:06:36 +02:00
|
|
|
from combo.data.models import Page, CellBase, ConfigJsonCell
|
2017-09-24 13:01:18 +02:00
|
|
|
from combo.public.views import render_cell
|
|
|
|
from combo.utils import requests
|
2017-08-21 11:44:12 +02:00
|
|
|
|
2017-07-07 08:44:24 +02:00
|
|
|
# Template tag/filter registry; every @register.* decorator below attaches to it.
register = template.Library()
|
|
|
|
|
2018-03-02 17:54:59 +01:00
|
|
|
# French weekday names, Monday-first (index matches datetime.weekday()).
FR_WEEKDAYS = ['lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi', 'dimanche']
# Two-letter English abbreviations mapped to full English day names,
# Monday-first.
EN_ABBREV_WEEKDAYS = OrderedDict(
    [
        ('mo', 'Monday'),
        ('tu', 'Tuesday'),
        ('we', 'Wednesday'),
        ('th', 'Thursday'),
        ('fr', 'Friday'),
        ('sa', 'Saturday'),
        ('su', 'Sunday'),
    ]
)
# ['mo', 'tu', ...] — abbreviation at index datetime.weekday().
EN_ABBREV_WEEKDAYS_LIST = list(EN_ABBREV_WEEKDAYS.keys())
# ['Monday', 'Tuesday', ...] — full name at index datetime.weekday().
EN_FULL_WEEKDAYS_LIST = list(EN_ABBREV_WEEKDAYS.values())
# Despite the "_LIST" name this is a mapping: English abbreviation ->
# French day name (e.g. 'mo' -> 'lundi').
FR_ABBREV_WEEKDAYS_LIST = OrderedDict(zip(EN_ABBREV_WEEKDAYS_LIST, FR_WEEKDAYS))
|
2018-03-02 17:54:59 +01:00
|
|
|
|
2017-07-07 08:44:24 +02:00
|
|
|
|
|
|
|
class TimeSlot(object):
    """An opening interval [start, end]; naive bounds are made timezone-aware."""

    def __init__(self, start, end):
        # normalize both bounds to aware datetimes before storing them
        self.start = make_aware(start) if is_naive(start) else start
        self.end = make_aware(end) if is_naive(end) else end

    def __repr__(self):
        return '<TimeSlot start=%s - end=%s>' % (self.start.strftime('%c'), self.end.strftime('%c'))
|
2017-07-07 08:44:24 +02:00
|
|
|
|
2018-03-02 17:54:59 +01:00
|
|
|
|
2020-12-05 20:51:09 +01:00
|
|
|
def openinghours_to_datetime(day_number, hour, minute, base_datetime):
    """Return the next occurrence of weekday ``day_number`` at hour:minute
    on or after ``base_datetime``, or None when the time values are invalid.
    """
    day_number %= 7  # ease operations using this parameter

    # days until the requested weekday (0 when base_datetime already matches)
    delta_days = (7 + day_number - base_datetime.weekday()) % 7
    target = base_datetime + datetime.timedelta(days=delta_days)
    if is_naive(target):
        target = make_aware(target)

    # apply the requested time of day
    try:
        at_time = datetime.time(hour=hour, minute=minute)
    except ValueError:
        # e.g. hour == 24 or minute out of range
        return None
    return datetime.datetime.combine(target, at_time)
|
|
|
|
|
|
|
|
|
|
|
|
def get_slot(day_number, time_table, base_datetime):
    """Build a TimeSlot for weekday ``day_number`` (0=Monday) from a
    ``time_table`` dict carrying start/end hour and minute values, relative
    to ``base_datetime``.
    """
    start_hour = int(time_table['start_hour'])
    start_minute = int(time_table['start_minute'])
    end_hour = int(time_table['end_hour'])
    end_minute = int(time_table['end_minute'])

    # "24:00" means midnight; normalize so datetime.time() accepts it
    if end_hour == 24 and end_minute == 0:
        end_hour = 0

    start = openinghours_to_datetime(day_number, start_hour, start_minute, base_datetime)

    # the closing time may belong to the next day
    end_day_number = day_number
    # relies on 'and' binding tighter than 'or': move to the next day when
    # the end time is strictly before the start time, or when both are
    # 00:00 (open 24h/24)
    if (
        end_hour < start_hour
        or end_hour == start_hour
        and end_minute < start_minute
        or end_hour == start_hour == 0
        and end_minute == start_minute == 0
    ):  # 24h/24
        end_day_number += 1
    end = openinghours_to_datetime(end_day_number, end_hour, end_minute, base_datetime)

    # NOTE(review): openinghours_to_datetime() returns None for invalid
    # times; the comparison below would then raise TypeError — confirm the
    # time_table values are always valid at this point.
    if end < start:
        # the end time was found this week while the start time was picked
        # on next week; this occurs when we are now past 24:00, on the next
        # day.  Recompute the start time from yesterday.
        yesterday = base_datetime - datetime.timedelta(days=1)
        start = openinghours_to_datetime(day_number, start_hour, start_minute, yesterday)

    return TimeSlot(start, end)
|
|
|
|
|
|
|
|
|
2020-12-01 19:59:59 +01:00
|
|
|
def get_time_table_from_specification(specification):
    """Parse an openinghoursspecification data block.

    Returns a (day_number, time_table) tuple where ``day_number`` indexes
    EN_FULL_WEEKDAYS_LIST and ``time_table`` maps
    start/end hour/minute keys to the "HH"/"MM" strings found in the block.

    Raises ValueError when the block is malformed (missing or non-string
    'dayOfWeek', unknown weekday name, or badly formatted opens/closes),
    so callers can catch ValueError and skip the entry.
    """
    day_of_week = specification.get('dayOfWeek')
    # .get() turns a missing key into None, which fails the isinstance
    # check below; reporting it as ValueError (instead of the KeyError a
    # direct subscript would raise) matches what callers already handle
    if not isinstance(day_of_week, str):
        raise ValueError('dayOfWeek must be a string')
    # dayOfWeek may carry a URL-style prefix (".../Monday"); only the last
    # path segment is looked up, and .index() raises ValueError when the
    # day name is unknown
    day_number = EN_FULL_WEEKDAYS_LIST.index(day_of_week.split('/')[-1])
    # "HH:MM" strings; a wrong number of ':'-separated parts raises
    # ValueError on unpacking
    start_hour, start_minute = specification['opens'].split(':')
    end_hour, end_minute = specification['closes'].split(':')
    time_table = {
        'start_hour': start_hour,
        'start_minute': start_minute,
        'end_hour': end_hour,
        'end_minute': end_minute,
    }
    return (day_number, time_table)
|
2020-12-01 19:59:59 +01:00
|
|
|
|
|
|
|
|
2020-12-05 19:41:44 +01:00
|
|
|
def get_period_from_data(time_table):
    """Classify a time table as a morning ('am') or afternoon ('pm') slot.

    Returns (period, all_day_hours); all_day_hours is True when the slot
    starts in the morning but runs past noon.
    """
    opens_hour = int(time_table['start_hour'])
    opens_minute = int(time_table['start_minute'])
    closes_hour = int(time_table['end_hour'])
    closes_minute = int(time_table['end_minute'])

    # "24:00" is the same wall-clock instant as midnight
    if (closes_hour, closes_minute) == (24, 0):
        closes_hour = 0

    opening_time = datetime.time(hour=opens_hour, minute=opens_minute)
    closing_time = datetime.time(hour=closes_hour, minute=closes_minute)

    # closing_time may last on the night, hence the ordering check
    if opening_time < closing_time and closing_time.hour <= 12:
        return ('am', False)
    if opening_time.hour <= 12:
        # starts in the morning and runs past noon
        return ('am', True)
    return ('pm', False)
|
|
|
|
|
|
|
|
|
2020-12-05 21:22:07 +01:00
|
|
|
def get_slots_from_mdr_format(data, base_datetime):
    """Process data from Maison du Rhone geojson data from data.grandlyon.fr
    (/ws/grandlyon/ter_territoire.maison_du_rhone/all.json).

    Returns (slots, known_format): the next opening TimeSlots in
    chronological order starting from base_datetime, and whether the data
    keys matched the expected '<jour>_am'/'<jour>_pm' format.
    """
    if 'properties' in data:
        data = data['properties']

    slots = []
    # keys such as 'lundi_am', 'mardi_pm', ... identify this format
    expected_keys = ['%s_am' % day for day in FR_WEEKDAYS] + ['%s_pm' % day for day in FR_WEEKDAYS]
    key_pattern = '|'.join(expected_keys)
    known_format = any(re.search(key_pattern, data_key) is not None for data_key in data.keys())

    hours_re = re.compile(r'(\d?\d)h(\d\d)-(\d?\d)h(\d\d)')
    day_cursor = base_datetime
    for _ in range(7):
        weekday_name = FR_WEEKDAYS[day_cursor.weekday()]
        for half_day in ('am', 'pm'):
            raw_hours = data.get('%s_%s' % (weekday_name, half_day))
            if not raw_hours:
                continue
            match = hours_re.match(raw_hours)
            if match is None:
                # unparseable value ("8h30-12h00" expected): skip it
                continue
            start_h, start_m, end_h, end_m = (int(part) for part in match.groups())
            time_table = {
                'start_hour': start_h,
                'start_minute': start_m,
                'end_hour': end_h,
                'end_minute': end_m,
            }
            # slots are appended in chronological order beginning from today
            slots.append(get_slot(day_cursor.weekday(), time_table, base_datetime))
        day_cursor += datetime.timedelta(days=1)

    return (slots, known_format)
|
|
|
|
|
|
|
|
|
2018-08-17 10:18:48 +02:00
|
|
|
def parse_opening_hours_data(mairie_data):
    """Parse every known openinghours data format.

    Yields (days_list, time_table) pairs; entries look like
    "Mo-Fr 08:30-17:00" or "Mo,We 09:00-12:00" and unparseable entries are
    skipped.
    """
    for openinghours in mairie_data.get('openinghours', []):
        # format is comma-separated days and/or intervals, or only one day
        entry_match = re.match(
            r'(\w\w(?:(?:,|\-)?\w\w)*) (\d\d?):(\d\d?)-(\d\d?):(\d\d?)', openinghours
        )
        if entry_match is None:
            # invalid input data
            continue
        groups = entry_match.groups()
        hour_fields = groups[1:]
        for day in groups[0].split(','):
            if '-' in day:
                # a "xx-yy" weekday interval
                bounds = re.match(r'(\w\w)-(\w\w)', day).groups()
                time_table = dict(
                    zip(
                        ('start_day', 'end_day', 'start_hour', 'start_minute', 'end_hour', 'end_minute'),
                        bounds + hour_fields,
                    )
                )
                first = EN_ABBREV_WEEKDAYS_LIST.index(time_table['start_day'].lower())
                last = EN_ABBREV_WEEKDAYS_LIST.index(time_table['end_day'].lower())
                days_list = EN_ABBREV_WEEKDAYS_LIST[first : last + 1]
            else:
                # a single weekday
                time_table = dict(
                    zip(
                        ('start_day', 'start_hour', 'start_minute', 'end_hour', 'end_minute'),
                        (day,) + hour_fields,
                    )
                )
                days_list = [EN_ABBREV_WEEKDAYS_LIST[EN_ABBREV_WEEKDAYS_LIST.index(day.lower())]]
            yield (days_list, time_table)
|
|
|
|
|
|
|
|
|
2021-01-21 18:01:47 +01:00
|
|
|
def parse_mairie_formats(data, base_datetime, oh_add, ohs_add, ohs_del):
    """Walk the mairie GeoJSON opening-hours structures and dispatch them.

    Callbacks:
      - ohs_add(day_number, time_table): an 'openinghoursspecification'
        block that defines an opening period (has opens/closes);
      - ohs_del(valid_from, valid_through): a specification block that
        defines an exclusion period (no opens/closes);
      - oh_add(days_list, time_table): an entry parsed from the plain
        'openinghours' strings (only used when no specification defined
        any opening period).

    Returns True when at least one known format was recognized.
    """
    if 'properties' in data:
        data = data['properties']

    some_opening_periods_defined = False
    known_format = False
    # default validity window when the specification gives none
    previous_week = base_datetime - datetime.timedelta(7)
    next_week = base_datetime + datetime.timedelta(7)
    for specification in data.get('openinghoursspecification', []):
        known_format = True
        valid_from, valid_through = previous_week, next_week
        if specification.get('validFrom'):
            valid_from = parse_valid_from(specification)
        if specification.get('validThrough'):
            valid_through = parse_valid_through(specification)
        if not valid_from or not valid_through:
            # unparsable validity bounds: skip the specification
            continue
        if base_datetime > valid_through:
            # already expired
            continue
        if specification.get('opens') and specification.get('closes'):
            # parse specification only for the current period relative to utcnow()
            if base_datetime < valid_from:
                continue
            # case when opening periods are defined
            some_opening_periods_defined = True
            try:
                day_number, time_table = get_time_table_from_specification(specification)
            except ValueError:
                # malformed specification: skip it
                continue
            ohs_add(day_number, time_table)
        else:
            # case when exclusions are defined
            ohs_del(valid_from, valid_through)

    if not some_opening_periods_defined:
        # some mairie may only have opening periods defined into openinghours (e.g. Bron)
        for days_list, time_table in parse_opening_hours_data(data):
            known_format = True
            oh_add(days_list, time_table)
    return known_format
|
2018-03-02 17:54:59 +01:00
|
|
|
|
|
|
|
|
2020-02-26 10:40:19 +01:00
|
|
|
def parse_valid_from(spec):
    """Return the aware datetime at which *spec* starts being valid."""
    raw = spec.get('validFrom')
    valid_from = parse_datetime(raw) or parse_date(raw)
    if not isinstance(valid_from, datetime.datetime):
        # bare date: promote it to an aware datetime at midnight
        valid_from = make_aware(datetime.datetime(valid_from.year, valid_from.month, valid_from.day))
    return valid_from
|
|
|
|
|
|
|
|
|
|
|
|
def parse_valid_through(spec):
    """Return the aware datetime until which *spec* remains valid."""
    raw = spec.get('validThrough')
    valid_through = parse_datetime(raw) or parse_date(raw)
    if not isinstance(valid_through, datetime.datetime):
        # bare date: make it cover the whole day, up to 23:59
        valid_through = make_aware(
            datetime.datetime(valid_through.year, valid_through.month, valid_through.day, 23, 59)
        )
    return valid_through
|
|
|
|
|
|
|
|
|
2020-04-07 17:57:34 +02:00
|
|
|
@register.simple_tag
|
2018-05-11 13:00:32 +02:00
|
|
|
def get_mairie_opening_hours(mairie_data):
    """Process Mairie Geojson to extract data of each day's opening hours.

    Returns:
      - '' when no data is given;
      - None when the format is unknown (rendered as "unavailable");
      - otherwise a list of (weekday label, {'am': ..., 'pm': ...}) pairs
        whose values are "XhY-ZhW" strings, '' (blank) or None (closed).
    """

    if not mairie_data:
        return ''

    base_datetime = now()
    # weekday abbreviations blanked out by exclusion periods
    exclusions = set()
    # 'mo'..'su' -> {'am': ..., 'pm': ...}; filled in by the callbacks below
    opening_hours_dict = OrderedDict(
        zip(EN_ABBREV_WEEKDAYS_LIST, [{'am': None, 'pm': None} for i in range(7)])
    )

    def update_opening_hours(weekday, time_table):
        # record the slot under its am/pm column as a "XhY-ZhW" string
        period, all_day_hours = get_period_from_data(time_table)
        if all_day_hours and period == 'am':
            opening_hours_dict[weekday]['pm'] = ''  # empty string to avoid displaying fermé
        opening_hours_dict[weekday][period] = "%sh%s-%sh%s" % (
            time_table['start_hour'],
            time_table['start_minute'],
            time_table['end_hour'],
            time_table['end_minute'],
        )

    def oh_add(days_list, time_table):
        # callback for entries parsed from the plain 'openinghours' strings
        for weekday in days_list:
            update_opening_hours(weekday, time_table)

    def ohs_add(day_number, time_table):
        # callback for 'openinghoursspecification' opening periods
        weekday = EN_ABBREV_WEEKDAYS_LIST[day_number]
        update_opening_hours(weekday, time_table)

    def ohs_del(valid_from, valid_through):
        # callback for exclusion periods: mark every weekday covered by
        # [valid_from, valid_through] as excluded, capped at the 7 days
        # following base_datetime
        day = max(base_datetime, valid_from)
        nb_max_days = 7 - max(valid_from.toordinal() - base_datetime.toordinal(), 0)
        nb_days = 0
        while nb_days < nb_max_days and day < valid_through:
            exclusions.add(EN_ABBREV_WEEKDAYS_LIST[day.weekday()])
            day += datetime.timedelta(days=1)
            nb_days += 1

    known_format = parse_mairie_formats(mairie_data, base_datetime, oh_add, ohs_add, ohs_del)
    # excluded weekdays are rendered fully closed
    for weekday in exclusions:
        opening_hours_dict[weekday] = {'am': None, 'pm': None}

    if not (
        any([x['am'] for x in opening_hours_dict.values()])
        or any([x['pm'] for x in opening_hours_dict.values()])
    ):
        # always closed, returns None if the format is unknown so it can be
        # displayed as "unavailable".
        if not known_format:
            return None
        # otherwise returns an array of closed days
        return [(weekday, {'am': None, 'pm': ''}) for weekday in FR_WEEKDAYS]

    # translate abbreviations to French labels; drop week-end days with no
    # opening at all
    return [
        (FR_ABBREV_WEEKDAYS_LIST[weekday], hours)
        for weekday, hours in opening_hours_dict.items()
        if not (weekday in ['sa', 'su'] and not hours['am'] and not hours['pm'])
    ]
|
|
|
|
|
|
|
|
|
2018-03-02 17:54:59 +01:00
|
|
|
@register.filter
|
2018-08-17 10:18:48 +02:00
|
|
|
def as_opening_hours_badge(data):
    """Render an open/closed status badge from opening-hours data.

    Returns '' when there is no data or no known format is recognized;
    otherwise a mark_safe() '<div class="badge ...">' snippet whose class
    is 'open', 'soon-to-be-closed' or 'closed'.
    """
    if not data:
        return ''

    base_datetime = now()
    slots = []
    exclusion_slots = []
    today = base_datetime.date()
    # first try the "maison du rhone" format
    (slots, known_format) = get_slots_from_mdr_format(data, base_datetime)

    def oh_add(days_list, time_table):
        # callback for plain 'openinghours' entries
        for weekday in days_list:
            day_number = EN_ABBREV_WEEKDAYS_LIST.index(weekday)
            timeslot = get_slot(day_number, time_table, base_datetime)
            # add to slots the opening hours in chronological order beginning from today
            slots.append(timeslot)

    def ohs_add(day_number, time_table):
        # callback for 'openinghoursspecification' opening periods
        timeslot = get_slot(day_number, time_table, base_datetime)
        slots.append(timeslot)

    def ohs_del(valid_from, valid_through):
        # callback for exclusion periods
        exclusion_slots.append(TimeSlot(valid_from, valid_through))

    if not known_format:
        # Process mairie json and return slots the opening hours
        # in chronological order beginning today
        known_format = parse_mairie_formats(data, base_datetime, oh_add, ohs_add, ohs_del)
        # order slots and cycle the list beginning with 'base_datetime'
        slots.sort(key=operator.attrgetter('start'))

    if not known_format:
        return ''

    # remove past slots and exclude special timeslots
    for i, slot in enumerate(slots):
        if base_datetime > slot.end:
            slots[i] = None
        else:
            for exclusion in exclusion_slots:
                if slot.start >= exclusion.start and slot.end <= exclusion.end:
                    slots[i] = None

    def format_time(hour, minute):
        # "8h05"-style rendering; midnight is spelled out
        time = "%sh%02d" % (hour, minute)
        if time == '0h00':
            time = 'minuit'
        return time

    # parse slots to return the right html
    slots = [x for x in slots if x]
    # NOTE(review): if base_datetime equals slots[0].end exactly, none of
    # the branches below runs and klass/label stay unbound (NameError) —
    # confirm this edge cannot happen or add a guard.
    if not slots:
        klass = 'closed'
        label = u'Fermé'
    elif base_datetime < slots[0].start:
        # the next slot is in the future: closed, announce the reopening
        klass = 'closed'
        verb = u'Réouvre'
        if slots[0].start.weekday() == today.weekday():
            day_label = ''
            if slots[0].start.hour < 12:
                verb = 'Ouvre'
        elif slots[0].start.weekday() == (today.weekday() + 1) % 7:
            day_label = u'demain'
        else:
            day_label = FR_WEEKDAYS[slots[0].start.weekday()]
        if slots[0].start.strftime("%H:%M") == slots[0].end.strftime("%H:%M") == '00:00':
            # both bounds at midnight: open around the clock
            label = u'%s %s 24h/24' % (verb, day_label)
        else:
            time = format_time(slots[0].start.hour, slots[0].start.minute)
            label = u'%s %s à %s' % (verb, day_label, time)
    elif base_datetime < slots[0].end:
        # currently inside the first slot: open
        if (slots[0].end - base_datetime).seconds < 3600:
            klass = 'soon-to-be-closed'
        else:
            klass = 'open'
        if slots[0].start.strftime("%H:%M") == slots[0].end.strftime("%H:%M") == '00:00':
            label = u"Ouvert 24h/24"
        else:
            time = format_time(slots[0].end.hour, slots[0].end.minute)
            label = u"Ouvert jusqu'à %s" % time

    return mark_safe(u'<div class="badge %s"><span>%s</span></div>' % (klass, label))
|
|
|
|
|
2017-07-10 15:00:54 +02:00
|
|
|
|
|
|
|
@register.filter
|
|
|
|
def onlymoov_duration(string):
    """Render an onlymoov duration string as French text.

    Only the hour and minute components of values such as "PT1H16M3S" or
    "PT1M35S" are kept; an unparseable value yields '?'.
    """
    match = re.match(r'PT(\d+H)?(\d+M)', string)
    if match is None:
        return '?'
    hour_part, minute_part = match.groups()

    hours = ''
    if hour_part:
        nb_hours = int(hour_part[:-1])
        hours = '%s heure%s' % (nb_hours, 's' if nb_hours > 1 else '')

    nb_minutes = int(minute_part[:-1])
    minutes = '%d min' % nb_minutes if nb_minutes else ''

    return '%s %s' % (hours, minutes)
|
2017-08-21 11:44:12 +02:00
|
|
|
|
|
|
|
|
|
|
|
@register.filter
|
2018-06-15 11:08:54 +02:00
|
|
|
def place_page(cell):
    """Return the 'place'-template page hosting the same cell, or None."""
    lookup = dict(key=cell.key, parameters=cell.parameters, page__template_name='place')
    try:
        matching_cell = ConfigJsonCell.objects.get(**lookup)
    except ConfigJsonCell.DoesNotExist:
        return None
    else:
        return matching_cell.page
|
|
|
|
|
|
|
|
|
|
|
|
@register.filter
|
|
|
|
def place_page_url(cell):
    """URL of the place page matching this cell, or '' when there is none."""
    page = place_page(cell)
    return '' if page is None else page.get_online_url()
|
2017-08-21 11:44:12 +02:00
|
|
|
|
|
|
|
|
|
|
|
@register.filter
|
|
|
|
def is_place_page(page):
    """True when *page* is set and uses the 'place' template."""
    return bool(page) and page.template_name == 'place'
|
2017-09-13 15:35:26 +02:00
|
|
|
|
|
|
|
|
2020-04-07 17:57:34 +02:00
|
|
|
@register.simple_tag
|
2018-07-02 22:44:38 +02:00
|
|
|
def get_tile_picture_size(page):
    """Picture size to request: full width on place pages, thumbnail elsewhere."""
    return '1300' if is_place_page(page) else '300x300'
|
|
|
|
|
|
|
|
|
2017-09-13 15:35:26 +02:00
|
|
|
@register.filter
|
2018-11-29 16:25:29 +01:00
|
|
|
def as_producer(slug, default_slug=None):
    """Resolve a producer {'slug': ..., 'label': ...} dict from *slug*.

    *slug* may be a plain site/formdef slug string, or a form dict (the
    producer is then derived from the city name in its form_digest, a
    'producer-...' keyword, or its site_slug).  Falls back to
    *default_slug*, then to the 'toodego' producer.
    """
    # collectivity slug -> slugified (unaccented) label
    COLLECTIVITY_UNACCENT_LABELS = {x: slugify(y) for x, y in settings.COLLECTIVITY_LABELS.items()}
    if isinstance(slug, dict):
        # actually a form
        if slug.get('form_digest'):
            # a trailing "(City)" in the digest names the collectivity
            parenthesis = re.match(r'.*\((.*)\)', slug['form_digest'])
            if parenthesis:
                city_name = parenthesis.group(1)
                if city_name in settings.COLLECTIVITY_LABELS.values():
                    collectivity = [x for x in settings.COLLECTIVITY_LABELS.items() if x[1] == city_name][0]
                    return {'slug': collectivity[0], 'label': collectivity[1]}
                # retry the match with slugified (unaccented) names
                city_slug = slugify(city_name)
                if city_slug in COLLECTIVITY_UNACCENT_LABELS.values():
                    collectivity = [x for x in COLLECTIVITY_UNACCENT_LABELS.items() if x[1] == city_slug][0]
                    return {'slug': collectivity[0], 'label': collectivity[1]}

        # otherwise derive a slug string from the form's keywords or site
        for keyword in slug.get('keywords') or []:
            if keyword.startswith('producer-'):
                slug = keyword.split('-', 1)[1]
                break
        else:
            # NOTE(review): site_slug may be missing (None), which would
            # make the "':' in slug" test below raise TypeError — confirm
            # callers always provide it.
            slug = slug.get('site_slug')

    producer = None
    if ':' in slug:  # formdef_reference
        slug = slug.split(':')[0]

    if slug.startswith('_'):
        # take the first underscore-delimited segment and drop any
        # 'hobo-' marker (e.g. '_hobo-foo' -> 'foo')
        producer = slug.split('_')[1].replace('hobo-', '')
    else:
        producer = slug

    if slug == 'eservices':
        # handle collectivity sites, they are individually named
        # "eservices" but have the collectivity slug as a template
        # variable.
        producer = settings.TEMPLATE_VARS.get('gnm_commune', 'grandlyon')
        if producer and settings.TEMPLATE_VARS.get('gnm_commune_name'):
            return {'slug': producer, 'label': settings.TEMPLATE_VARS.get('gnm_commune_name')}

    try:
        # a "producer-xxx" token embedded anywhere in the string wins
        producer = re.search(r'(^|\W)producer-([\w-]*)(\W|$)', producer).group(2).strip()
    except AttributeError:
        pass

    if producer.startswith('Lyon '):  # assume sth like "Lyon 7eme"
        producer = 'Lyon'

    producer_slug = slugify(producer)

    # look the producer up, from the most to the least specific source
    if settings.KNOWN_SERVICES['hobo'].get('hobo-%s' % producer):
        return {
            'slug': producer,
            'label': settings.KNOWN_SERVICES['hobo'].get('hobo-%s' % producer, {'title': ''})['title'],
        }
    elif settings.KNOWN_SERVICES['hobo'].get('_interco_hobo-%s' % producer):
        return {
            'slug': producer,
            'label': settings.KNOWN_SERVICES['hobo'].get('_interco_hobo-%s' % producer, {'title': ''})[
                'title'
            ],
        }
    elif producer in settings.PRODUCER_LABELS:
        return {'slug': producer, 'label': settings.PRODUCER_LABELS[producer]}
    elif producer in settings.COLLECTIVITY_LABELS.values():
        collectivity = [x for x in settings.COLLECTIVITY_LABELS.items() if x[1] == producer][0]
        return {'slug': collectivity[0], 'label': collectivity[1]}
    elif producer_slug in COLLECTIVITY_UNACCENT_LABELS.values():
        collectivity = [x for x in COLLECTIVITY_UNACCENT_LABELS.items() if x[1] == producer_slug][0]
        return {'slug': collectivity[0], 'label': collectivity[1]}
    elif default_slug:
        # retry once with the fallback slug
        return as_producer(default_slug)
    else:
        return {'slug': 'toodego', 'label': 'Toodego'}
|
2017-09-24 13:01:18 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2017-09-24 20:44:15 +02:00
|
|
|
@register.filter
|
|
|
|
def as_commune(user_data):
    """Map *user_data* (a page slug string or a user-profile dict) to a
    commune dict with 'label', 'slug', 'url' and possibly 'gnm' keys.

    Returns None when no commune can be determined.
    """
    if not user_data:
        return None

    if isinstance(user_data, six.string_types):
        # user_data is expected to be (page) slug
        collectivities = get_gnm_collectivities()
        for collectivity in collectivities:
            if slugify(collectivity['label']) in user_data:
                # mark the hit as belonging to the gnm portals
                collectivity['gnm'] = True
                return collectivity
        return None

    # dict case: read the city from the user profile
    city = user_data.get('city') or user_data.get('address_city')
    if city:
        # first look for known portals
        collectivities = get_gnm_collectivities()
        for collectivity in collectivities:
            if collectivity.get('label') == city:
                return {
                    'label': city,
                    'slug': slugify(city),
                    'url': collectivity['url'],
                    'gnm': True,
                }
        # if not found look in mairie pages
        pages = Page.objects.filter(parent__slug='mairie', slug__icontains=slugify(city)).exclude(
            slug__icontains='annexe'
        )
        if pages.exists():
            return {
                'label': city,
                'slug': slugify(city),
                'url': pages[0].get_online_url(),
            }
    return None
|
|
|
|
|
2017-09-24 13:01:18 +02:00
|
|
|
|
2020-04-07 17:57:34 +02:00
|
|
|
@register.simple_tag
def get_suggestions(request, user_data, places_data):
    """Build and persist the initial set of dashboard tiles for the user.

    Creates one ConfigJsonCell + Tile row per suggested tile on the first
    dashboard found, then returns the list of tile descriptions (or a
    one-element marker list when nothing was done).
    """
    # fill initial dashboard based on this layout:
    ## au quotidien
    # mairie tile
    # closest velov/tcl/swimming pool/etc. tiles
    ## environnement
    # air quality
    # pollen

    if not getattr(request, 'user', None) or not request.user.is_authenticated:
        # no user
        return ['no user']

    dashboard = DashboardCell.objects.all().filter(page__snapshot__isnull=True)[0]
    if Tile.objects.filter(dashboard=dashboard, user=request.user).exists():
        # dashboard already filled
        return ['already filled']

    mairie_tile = None
    service_tiles = []
    # default air-quality tile on the map's default position; replaced
    # below when the user's address can be geocoded
    airquality_tile = {
        'key': 'airquality',
        'parameters': {
            'lon': settings.COMBO_MAP_DEFAULT_POSITION['lng'],
            'lat': settings.COMBO_MAP_DEFAULT_POSITION['lat'],
        },
    }
    pollen_tile = {'key': 'pollen'}

    city = user_data.get('city') or user_data.get('address_city')
    zipcode = user_data.get('zipcode') or user_data.get('address_zipcode')
    if city:
        # get commune tile for the user city
        maplayer = MapLayer.objects.get(slug='mairie')
        try:
            data_result = requests.get(
                maplayer.geojson_url, timeout=2, without_user=True, cache_duration=300
            ).json()
        except RequestException:
            pass
        else:
            city_slug = slugify(city)
            if city_slug == 'lyon' and zipcode:
                try:
                    # derive a per-district slug from the zipcode,
                    # e.g. 69003 -> 'lyon-3'
                    city_slug = 'lyon-%s' % (int(zipcode) - 69000)
                except ValueError:
                    # fallback to hotel de ville
                    city_slug = 'ville-de-lyon'
            if data_result.get('features'):
                for feature in data_result['features']:
                    # skip annex town halls, keep the main one
                    if 'Annexe' in feature['properties']['nom']:
                        continue
                    if city_slug in slugify(feature['properties']['nom']):
                        mairie_tile = {'key': maplayer.slug, 'properties': feature['properties']}
                        break

    # build a postal address, preferring the user's saved places over the
    # profile address fields
    address = None
    if places_data and places_data.get('data'):
        place_data = places_data['data'][0]
        address = u'%(adresse)s, %(ville)s, France' % place_data['content']
    elif user_data.get('address_street'):
        if not user_data.get('address_number'):
            user_data['address_number'] = ''
        address = u'%(address_number)s %(address_street)s, %(address_city)s, France' % user_data

    # geocode the address into coordinates
    coords = None
    if address:
        nominatim_url = settings.COMBO_GEOCODING_SERVICE
        url = '%s/search?q=%s&accept-language=fr&format=json' % (
            nominatim_url,
            quote(address.encode('utf-8')),
        )
        search_result = None
        try:
            search_result = requests.get(url, timeout=2, without_user=True, cache_duration=300).json()
        except RequestException:
            pass
        if search_result:
            coords = {'lon': search_result[0]['lon'], 'lat': search_result[0]['lat']}

    if coords:
        airquality_tile = {'key': 'airquality', 'parameters': coords}

        # look for nearby services inside a small bounding box around the
        # user coordinates (0.008°/0.006° — presumably under ~1 km; confirm)
        lat1, lat2 = float(coords['lat']) - 0.008, float(coords['lat']) + 0.008
        lon1, lon2 = float(coords['lon']) - 0.006, float(coords['lon']) + 0.006
        geod = Geod(ellps='WGS84')
        for maplayer in MapLayer.objects.filter(slug__in=('velov', 'piscine', 'tcl', 'bibliotheque', 'mdr')):
            url = maplayer.geojson_url + '&BBOX=%s,%s,%s,%s' % (lat1, lon1, lat2, lon2)
            try:
                data_result = requests.get(url, timeout=2, without_user=True, cache_duration=300).json()
            except RequestException:
                continue
            features = data_result.get('features')
            if not features:
                continue
            # geodesic distance to each feature; Geod.inv returns
            # (forward azimuth, back azimuth, distance) — we keep [2]
            for feature in features:
                feature['distance'] = geod.inv(
                    float(coords['lon']),
                    float(coords['lat']),
                    float(feature['geometry']['coordinates'][0]),
                    float(feature['geometry']['coordinates'][1]),
                )[2]
            features.sort(key=lambda x: x['distance'])
            # take closest feature
            if features:
                service_tiles.append({'key': maplayer.slug, 'properties': features[0]['properties']})

    # assemble final tile list: mairie first, then shuffled services,
    # then the environment group
    tiles = []
    if mairie_tile or service_tiles:
        if mairie_tile:
            tiles.append(mairie_tile)
        if service_tiles:
            random.shuffle(service_tiles)
            tiles.extend(service_tiles)

    # NOTE(review): airquality_tile and pollen_tile are always truthy at
    # this point, so this branch always runs
    if airquality_tile or pollen_tile:
        tiles.append({'key': 'group-title', 'parameters': {'text': 'Environnement'}})
        if airquality_tile:
            tiles.append(airquality_tile)
        if pollen_tile:
            tiles.append(pollen_tile)

    # persist each suggestion as a concrete ConfigJsonCell + Tile row
    for i, tile_data in enumerate(tiles):
        if 'properties' in tile_data:
            # keep only the properties declared in the cell type's form
            cell_form_keys = [
                x['varname'] for x in settings.JSON_CELL_TYPES[tile_data['key']].get('form') or {}
            ]
            tile_data['parameters'] = {}
            for key in cell_form_keys:
                tile_data['parameters'][key] = tile_data['properties'].get(key)

        cell = ConfigJsonCell(
            key=tile_data['key'],
            parameters=tile_data.get('parameters', {}),
            order=0,
            page_id=dashboard.page_id,
            placeholder='_suggested_tile',
        )
        cell.save()
        tile = Tile(dashboard=dashboard, cell=cell, user=request.user, order=i + 1)
        tile.save()

    return tiles
|
2017-09-25 10:23:08 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2020-04-07 17:57:34 +02:00
|
|
|
@register.simple_tag
def get_gnm_portal_url():
    """Return the GNM portal URL (interco portal when declared, default portal otherwise)."""
    combo_services = settings.KNOWN_SERVICES['combo']
    try:
        portal = combo_services['_interco_portal']
    except KeyError:
        portal = combo_services['portal']
    return portal.get('url')
|
2017-09-27 12:33:55 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2020-04-07 17:57:34 +02:00
|
|
|
@register.simple_tag
def get_gnm_collectivities():
    """List GNM collectivities as {'url', 'label'} dicts, sorted by label."""
    combo_services = settings.KNOWN_SERVICES['combo']
    hobo_services = settings.KNOWN_SERVICES['hobo']
    results = []
    for key, service in combo_services.items():
        if not key.endswith('_portal'):
            continue
        # '_foo_portal' -> matching hobo service key 'foo'
        matching_hobo = hobo_services.get(key.split('_portal')[0][1:])
        if not matching_hobo:
            continue
        if matching_hobo['title'] in ('SAU', 'Villeurbanne'):  # blacklist
            continue
        results.append({'url': service.get('url'), 'label': matching_hobo['title']})
    return sorted(results, key=operator.itemgetter('label'))
|
2018-06-22 13:21:36 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2018-06-22 13:21:36 +02:00
|
|
|
@register.inclusion_tag('combo/gnm/place_map.html')
def gnm_place_map(lat, lng):
    """Render a map centred on (lat, lng), locked at a single zoom level."""
    cell = Map()
    cell.initial_zoom = cell.min_zoom = cell.max_zoom = '17'
    ctx = cell.get_cell_extra_context({})
    ctx.update({'init_lat': lat, 'init_lng': lng})
    return ctx
|
2018-08-15 10:28:03 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2018-08-15 21:53:44 +02:00
|
|
|
@register.inclusion_tag('combo/gnm/airquality_map.html', takes_context=True)
def gnm_airquality_map(context):
    """Push the air-quality map cell's extra context onto the template context."""
    cell = Map()
    # start on the visitor's position, allow zooming between 10 and 19
    for attr, value in (
        ('initial_state', 'device-location'),
        ('initial_zoom', '15'),
        ('min_zoom', '10'),
        ('max_zoom', '19'),
    ):
        setattr(cell, attr, value)
    context.push(cell.get_cell_extra_context({}))
    return context
|
2018-08-15 21:54:28 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2018-08-15 21:54:28 +02:00
|
|
|
# str.translate() table mapping the characters that could break out of an
# HTML <script> element to their \uXXXX escapes; mirrors the escaping
# performed by the json_script filter in this module.
_json_script_escapes = {
    ord('>'): '\\u003E',
    ord('<'): '\\u003C',
    ord('&'): '\\u0026',
}
|
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2018-08-15 21:54:28 +02:00
|
|
|
@register.filter(is_safe=True)
def json_script(value, element_id):
    """Serialize *value* as JSON wrapped in a <script> tag with the given id.

    The JSON text is escaped via the module-level _json_script_escapes
    table so that '<', '>' and '&' cannot terminate the script element
    (same approach as Django's own json_script filter).  Previously the
    same escapes were duplicated here as a chain of str.replace() calls;
    str.translate() applies them in a single pass from one source of truth.
    """
    json_str = json.dumps(value, cls=DjangoJSONEncoder).translate(_json_script_escapes)
    return format_html('<script id="{}" type="application/json">{}</script>', element_id, mark_safe(json_str))
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2018-09-16 17:06:36 +02:00
|
|
|
|
2020-04-07 17:57:34 +02:00
|
|
|
@register.simple_tag
def get_goto_cell(page, request):
    """Fetch the cell referenced by the ?to= query parameter.

    A cell living on a 'place' page is duplicated onto *page* so it can be
    rendered there; any other cell is returned unchanged.  Returns None
    when the parameter is missing or matches no existing cell.
    """
    try:
        cell = ConfigJsonCell.objects.get(id=request.GET['to'])
    except (ConfigJsonCell.DoesNotExist, ValueError, KeyError):
        return None
    if cell.page.template_name == 'place':
        # create an alternate version of cell: clearing the id makes
        # save() insert a brand new row attached to the current page
        cell.id = None
        cell.placeholder = '_auto_tile'
        cell.page = page
        cell.save()
    return cell
|
2018-09-25 09:12:09 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2020-04-07 17:57:34 +02:00
|
|
|
@register.simple_tag
def get_collectivity_slugs():
    """Return the slugs of all known collectivities."""
    # iterating the mapping yields its keys (the slugs)
    return list(settings.COLLECTIVITY_LABELS)
|
2018-09-25 20:47:18 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2018-09-25 20:47:18 +02:00
|
|
|
@register.filter
def indice_values(indices):
    """Yield the yesterday/today/tomorrow indices, skipping missing ones."""
    for day_key in ('indice_j-1', 'indice_j', 'indice_j+1'):
        value = indices.get(day_key)
        if value:
            yield value
|
2018-10-04 15:38:21 +02:00
|
|
|
|
2021-01-11 21:31:40 +01:00
|
|
|
|
2018-10-04 15:38:21 +02:00
|
|
|
@register.filter
def airquality_hack(cell, request):
    """Materialize an airquality cell that takes its parameters from the URL.

    A cell on the airquality dynamic page has empty cell.parameters, as it
    gets those from the query string.  In order for the un/favorite link
    to work we duplicate the cell into a concrete object carrying a copy
    of the query parameters.  Other cells pass through unchanged.
    """
    if cell.key != 'airquality' or cell.parameters:
        return cell

    signed_ctx = request.GET.get('ctx')
    if signed_ctx:
        ctx = signing.loads(signed_ctx)
        lon, lat = ctx['q_lon'], ctx['q_lat']
    else:
        # coordinates are carried in the path, as a '<lon>,<lat>' segment
        lon, lat = request.path.split('/')[-2].split(',')

    cell.parameters = {'lon': lon, 'lat': lat}
    cell.placeholder = '_auto_tile'
    cell.id = None  # saving with no id inserts a new row
    cell.save()
    return cell
|
2020-12-21 11:41:12 +01:00
|
|
|
|
|
|
|
|
|
|
|
@register.simple_tag
def get_known_tile_types():
    """Return the keys of all configured JSON cell (tile) types."""
    # iterating the mapping yields its keys
    return list(settings.JSON_CELL_TYPES)
|
2021-03-16 09:56:35 +01:00
|
|
|
|
|
|
|
|
|
|
|
@register.filter
def form_asset_url(url, is_safe=True):
    """Turn a wcs form URL into the matching portal's stable asset URL.

    url: absolute URL of a form hosted on one of the known wcs services.
    Returns '/assets/wcs:form:picture:eservices:<form-slug>' resolved
    against the portal matching that wcs service, or '' when the URL does
    not match any known service or portal.

    NOTE(review): the is_safe parameter looks like it was meant for
    register.filter(is_safe=True); kept as-is for interface compatibility.

    Fixes: previously a URL served by no known wcs service left ou_prefix
    unbound (NameError), and a URL without a path raised IndexError; both
    now return ''.
    """
    parsed = urllib.parse.urlparse(url)
    path_parts = parsed.path.split('/')
    if len(path_parts) < 2 or not path_parts[1]:
        # no form slug in the URL
        return ''
    form_slug = path_parts[1]
    # look for wcs service where the form is from, to get service prefix
    ou_prefix = None
    for service_key, service_dict in settings.KNOWN_SERVICES.get('wcs').items():
        if url.startswith(service_dict.get('url')):
            if service_key.startswith('_'):
                # '_foo_wcs' -> '_foo_' prefix
                ou_prefix = service_key.rsplit('_', 1)[0] + '_'
            else:
                ou_prefix = ''
            break
    if ou_prefix is None:
        # URL is not served by any known wcs service
        return ''
    # get matching portal
    combo_service = settings.KNOWN_SERVICES.get('combo').get(ou_prefix + 'portal')
    if not combo_service:
        return ''
    # return well-known stable asset URL
    return urllib.parse.urljoin(combo_service.get('url'), '/assets/wcs:form:picture:eservices:%s' % form_slug)
|