Compare commits
218 Commits
main
...
wip/python
Author | SHA1 | Date |
---|---|---|
Frédéric Péters | 094cae52e5 | |
Frédéric Péters | 00e130ae53 | |
Frédéric Péters | b5c262dc64 | |
Frédéric Péters | 731697ebc9 | |
Frédéric Péters | cb17e126b7 | |
Frédéric Péters | a87a181371 | |
Frédéric Péters | 7a29de02bb | |
Frédéric Péters | f1b90ce1e2 | |
Frédéric Péters | 57f96ee3b4 | |
Frédéric Péters | 7b0978cf00 | |
Frédéric Péters | 4b9c230141 | |
Frédéric Péters | 4f3dc60311 | |
Frédéric Péters | 4f488c1d57 | |
Frédéric Péters | 9cf9355463 | |
Frédéric Péters | 03987a6e6c | |
Frédéric Péters | 625b574280 | |
Frédéric Péters | 7dcd2d485b | |
Frédéric Péters | 52df31918f | |
Frédéric Péters | 4aa96b93c5 | |
Frédéric Péters | 0be6a94f13 | |
Frédéric Péters | 0eb5415bee | |
Frédéric Péters | e9771cc834 | |
Frédéric Péters | a1414e1ebc | |
Frédéric Péters | 478ff982a1 | |
Frédéric Péters | 82fd9489bf | |
Frédéric Péters | 88d47903d1 | |
Frédéric Péters | a923e056ea | |
Frédéric Péters | 2877790b24 | |
Frédéric Péters | 3145fd7367 | |
Frédéric Péters | 1bb05944f8 | |
Frédéric Péters | 54d4b4686b | |
Frédéric Péters | 21dd276479 | |
Frédéric Péters | 12094de8e4 | |
Frédéric Péters | 859683f690 | |
Frédéric Péters | a55daeec35 | |
Frédéric Péters | b48283f626 | |
Frédéric Péters | 30a9d2d4c3 | |
Frédéric Péters | e45c19e794 | |
Frédéric Péters | badde34b2e | |
Frédéric Péters | 191f01bd7e | |
Frédéric Péters | 762560fcaf | |
Frédéric Péters | 228053c9ce | |
Frédéric Péters | 158d6fac2d | |
Frédéric Péters | 207d3ae91e | |
Frédéric Péters | fdb0ad1aa1 | |
Frédéric Péters | 1e8ca52e3c | |
Frédéric Péters | 00fc3deef0 | |
Frédéric Péters | f4ad46acc6 | |
Frédéric Péters | 370357e40a | |
Frédéric Péters | 3fefe4a344 | |
Frédéric Péters | 78084aa023 | |
Frédéric Péters | 7a446f5c88 | |
Frédéric Péters | 1591530889 | |
Frédéric Péters | 8e52fc8814 | |
Frédéric Péters | b3bc038a2e | |
Frédéric Péters | e0e128de4b | |
Frédéric Péters | 1ec58ee869 | |
Frédéric Péters | 96bf98cd7c | |
Frédéric Péters | a33e8ff8b4 | |
Frédéric Péters | 6875bf8954 | |
Frédéric Péters | 72bd7af167 | |
Frédéric Péters | dabe208677 | |
Frédéric Péters | 5793e5c509 | |
Frédéric Péters | 8158b456fa | |
Frédéric Péters | d62e945441 | |
Frédéric Péters | 9b2e2c8d13 | |
Frédéric Péters | ed9cdb777c | |
Frédéric Péters | 844dea5853 | |
Frédéric Péters | 31cc721510 | |
Frédéric Péters | 8c1fc39764 | |
Frédéric Péters | b401245d4b | |
Frédéric Péters | 87e940ddfc | |
Frédéric Péters | 248e7563c7 | |
Frédéric Péters | 9f9f623cb4 | |
Frédéric Péters | a68dccf8dd | |
Frédéric Péters | dc82d0f918 | |
Frédéric Péters | 21ee4427c6 | |
Frédéric Péters | b1fa54eb33 | |
Frédéric Péters | 6c89239f3d | |
Frédéric Péters | 129306e47d | |
Frédéric Péters | 85a4024417 | |
Frédéric Péters | 091ba4ae15 | |
Frédéric Péters | 81f2f7d90c | |
Frédéric Péters | 7d329c856b | |
Frédéric Péters | d0b9ff274c | |
Frédéric Péters | a4b8063976 | |
Frédéric Péters | 5a7161bbb5 | |
Frédéric Péters | fca37a3205 | |
Frédéric Péters | 4c6c08dcae | |
Frédéric Péters | 10d4451122 | |
Frédéric Péters | 3444d4bec8 | |
Frédéric Péters | d0b2407738 | |
Frédéric Péters | 5f8c243cd0 | |
Frédéric Péters | 7523343893 | |
Frédéric Péters | c3057f76bd | |
Frédéric Péters | ca35deba02 | |
Frédéric Péters | c089fbe70c | |
Frédéric Péters | 937b2c538d | |
Frédéric Péters | 4f7d4cd6cf | |
Frédéric Péters | 75c18872dc | |
Frédéric Péters | 95d29ec088 | |
Frédéric Péters | 03f2634895 | |
Frédéric Péters | 3938a8ab87 | |
Frédéric Péters | d1a7e6e187 | |
Frédéric Péters | b97f9a63f0 | |
Frédéric Péters | 1356365a15 | |
Frédéric Péters | 2cd20e666d | |
Frédéric Péters | 48118f8781 | |
Frédéric Péters | 4322fe9f64 | |
Frédéric Péters | e0bdd1c60f | |
Frédéric Péters | 23ab03be8a | |
Frédéric Péters | a97b93d163 | |
Frédéric Péters | 71f4a0fd3e | |
Frédéric Péters | 5395f9255f | |
Frédéric Péters | af22690d31 | |
Frédéric Péters | e4f335a604 | |
Frédéric Péters | f5880908bd | |
Frédéric Péters | 0f5d4555d2 | |
Frédéric Péters | f8587a00e1 | |
Frédéric Péters | 9cb2e4cc43 | |
Frédéric Péters | 8831c91a51 | |
Frédéric Péters | 697949b551 | |
Frédéric Péters | 8f3419baf1 | |
Frédéric Péters | 0e120fb4d8 | |
Frédéric Péters | 654d3d3b91 | |
Frédéric Péters | e43bba68f5 | |
Frédéric Péters | dbbb116552 | |
Frédéric Péters | ec09c2ea6a | |
Frédéric Péters | 7071ec96c4 | |
Frédéric Péters | 1bdd4cfda2 | |
Frédéric Péters | 0e01b626b1 | |
Frédéric Péters | 0a68468493 | |
Frédéric Péters | d0b4f24584 | |
Frédéric Péters | ba6e5cd5a1 | |
Frédéric Péters | 2aed9c1bc4 | |
Frédéric Péters | 344498ae4d | |
Frédéric Péters | 228457be9d | |
Frédéric Péters | 4e6f21ef43 | |
Frédéric Péters | 5a4302b461 | |
Frédéric Péters | 4b181fc1bf | |
Frédéric Péters | fd0b882578 | |
Frédéric Péters | e9f5112fbe | |
Frédéric Péters | a231aa0d97 | |
Frédéric Péters | bed2079a3e | |
Frédéric Péters | 259c62a197 | |
Frédéric Péters | 82cc9a3a98 | |
Frédéric Péters | 9fe1e84921 | |
Frédéric Péters | 8be31ef846 | |
Frédéric Péters | 53812ecd8e | |
Frédéric Péters | 7bd1967fc4 | |
Frédéric Péters | 22ba97571f | |
Frédéric Péters | ec72e371d3 | |
Frédéric Péters | c3fe5545ac | |
Frédéric Péters | 30ff9ff561 | |
Frédéric Péters | e541759a67 | |
Frédéric Péters | 1987c5407f | |
Frédéric Péters | fd88b46128 | |
Frédéric Péters | 5da68239d3 | |
Frédéric Péters | db6fe5f504 | |
Frédéric Péters | 4746d75099 | |
Frédéric Péters | 05abc1038d | |
Frédéric Péters | a295a0b441 | |
Frédéric Péters | e4333e6719 | |
Frédéric Péters | 6d8c2ffb14 | |
Frédéric Péters | 6f279b7b28 | |
Frédéric Péters | ef77a23f79 | |
Frédéric Péters | 7d7cea05a6 | |
Frédéric Péters | b79a1f5fbc | |
Frédéric Péters | a6e27b1df3 | |
Frédéric Péters | cf248b9e5a | |
Frédéric Péters | 5f1629a898 | |
Frédéric Péters | 90fdf2c398 | |
Frédéric Péters | 0f4fbdd7ff | |
Frédéric Péters | 1a7e2ec81b | |
Frédéric Péters | d174de9cf7 | |
Frédéric Péters | 95c5c36103 | |
Frédéric Péters | 07357b8549 | |
Frédéric Péters | b73c5cc666 | |
Frédéric Péters | dbec13c73d | |
Frédéric Péters | b46c654323 | |
Frédéric Péters | 8c0658c2c3 | |
Frédéric Péters | d4e9a16468 | |
Frédéric Péters | 56e9be1142 | |
Frédéric Péters | 6d80317832 | |
Frédéric Péters | 4fced111d6 | |
Frédéric Péters | 1346c90386 | |
Frédéric Péters | d4840892b4 | |
Frédéric Péters | d6bce19689 | |
Frédéric Péters | 9dd19f2f05 | |
Frédéric Péters | 4f827110bf | |
Frédéric Péters | 0db7cb0804 | |
Frédéric Péters | 61efee3db9 | |
Frédéric Péters | be4101e5a7 | |
Frédéric Péters | 5d7bc70c0f | |
Frédéric Péters | a4743c216c | |
Frédéric Péters | 11ad87508c | |
Frédéric Péters | 2927c87edd | |
Frédéric Péters | 0e968f3b83 | |
Frédéric Péters | a321f200be | |
Frédéric Péters | 4d39395c39 | |
Frédéric Péters | 48ffcf975a | |
Frédéric Péters | 1ea19f1c74 | |
Frédéric Péters | e4bd408044 | |
Frédéric Péters | d33ae185a7 | |
Frédéric Péters | 75516f7b6a | |
Frédéric Péters | 78c4a5f640 | |
Frédéric Péters | c586e8262d | |
Frédéric Péters | 87253343d4 | |
Frédéric Péters | a17d189124 | |
Frédéric Péters | 29d0f9e8eb | |
Frédéric Péters | 86a623669a | |
Frédéric Péters | 71508015a3 | |
Frédéric Péters | 650c9c3093 | |
Frédéric Péters | 7614f40bcd | |
Frédéric Péters | b9c1293608 | |
Frédéric Péters | deaf0c34f1 | |
Frédéric Péters | 34a382f03e | |
Frédéric Péters | 95bc775346 |
|
@ -1,5 +1,5 @@
|
||||||
[run]
|
[run]
|
||||||
omit = wcs/ctl/Bouncers/*.py wcs/qommon/vendor/*.py
|
omit = wcs/qommon/vendor/*.py
|
||||||
|
|
||||||
[report]
|
[report]
|
||||||
omit = wcs/ctl/Bouncers/*.py wcs/qommon/vendor/*.py
|
omit = wcs/qommon/vendor/*.py
|
||||||
|
|
|
@ -4,9 +4,15 @@ pipeline {
|
||||||
agent any
|
agent any
|
||||||
options { disableConcurrentBuilds() }
|
options { disableConcurrentBuilds() }
|
||||||
stages {
|
stages {
|
||||||
|
stage('Unit Tests (Python 2)') {
|
||||||
|
steps {
|
||||||
|
sh 'tox -r -e py2'
|
||||||
|
}
|
||||||
|
}
|
||||||
stage('Unit Tests') {
|
stage('Unit Tests') {
|
||||||
steps {
|
steps {
|
||||||
sh 'tox -r'
|
sh 'git clean -xdf'
|
||||||
|
sh 'tox -r -e py3-pylint-coverage'
|
||||||
}
|
}
|
||||||
post {
|
post {
|
||||||
always {
|
always {
|
||||||
|
@ -36,7 +42,7 @@ pipeline {
|
||||||
always {
|
always {
|
||||||
script {
|
script {
|
||||||
utils = new Utils()
|
utils = new Utils()
|
||||||
utils.mail_notify(currentBuild, env, 'ci+jenkins-wcs@entrouvert.org')
|
utils.mail_notify(currentBuild, env, 'fpeters+jenkins-wcs@entrouvert.org')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
success {
|
success {
|
||||||
|
|
8
README
8
README
|
@ -41,14 +41,6 @@ AUTHORS file for additional credits.
|
||||||
w.c.s. incorporates some other pieces of code, with their own authors and
|
w.c.s. incorporates some other pieces of code, with their own authors and
|
||||||
copyright notices :
|
copyright notices :
|
||||||
|
|
||||||
Email bounce detection code (wcs/ctl/Bounces/*) from Mailman:
|
|
||||||
# http://www.gnu.org/software/mailman/
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
|
|
||||||
Some artwork from GTK+:
|
Some artwork from GTK+:
|
||||||
# http://www.gtk.org/
|
# http://www.gtk.org/
|
||||||
#
|
#
|
||||||
|
|
|
@ -16,10 +16,10 @@ rm -f coverage.xml
|
||||||
rm -f test_results.xml
|
rm -f test_results.xml
|
||||||
cat << _EOF_ > .coveragerc
|
cat << _EOF_ > .coveragerc
|
||||||
[run]
|
[run]
|
||||||
omit = wcs/ctl/Bouncers/*.py wcs/qommon/vendor/*.py
|
omit = wcs/qommon/vendor/*.py
|
||||||
|
|
||||||
[report]
|
[report]
|
||||||
omit = wcs/ctl/Bouncers/*.py wcs/qommon/vendor/*.py
|
omit = wcs/qommon/vendor/*.py
|
||||||
_EOF_
|
_EOF_
|
||||||
|
|
||||||
# $PIP_BIN install --upgrade 'pip<8'
|
# $PIP_BIN install --upgrade 'pip<8'
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import os
|
import os
|
||||||
import ConfigParser
|
from django.utils.six.moves import configparser as ConfigParser
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
@ -23,7 +23,7 @@ def site_options(request, pub, section, variable, value):
|
||||||
if not config.has_section(section):
|
if not config.has_section(section):
|
||||||
config.add_section(section)
|
config.add_section(section)
|
||||||
config.set(section, variable, value)
|
config.set(section, variable, value)
|
||||||
with file(path, 'w') as site_option:
|
with open(path, 'w') as site_option:
|
||||||
config.write(site_option)
|
config.write(site_option)
|
||||||
|
|
||||||
def fin():
|
def fin():
|
||||||
|
@ -31,7 +31,7 @@ def site_options(request, pub, section, variable, value):
|
||||||
if os.path.exists(path):
|
if os.path.exists(path):
|
||||||
config.read([path])
|
config.read([path])
|
||||||
config.remove_option(section, variable)
|
config.remove_option(section, variable)
|
||||||
with file(path, 'w') as site_option:
|
with open(path, 'w') as site_option:
|
||||||
config.write(site_option)
|
config.write(site_option)
|
||||||
request.addfinalizer(fin)
|
request.addfinalizer(fin)
|
||||||
return value
|
return value
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,6 +1,5 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
from StringIO import StringIO
|
|
||||||
import pytest
|
import pytest
|
||||||
import json
|
import json
|
||||||
import shutil
|
import shutil
|
||||||
|
@ -10,13 +9,16 @@ import base64
|
||||||
import hashlib
|
import hashlib
|
||||||
import mock
|
import mock
|
||||||
import re
|
import re
|
||||||
import urllib
|
|
||||||
import urlparse
|
|
||||||
import datetime
|
import datetime
|
||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
from django.utils.encoding import force_bytes, force_text
|
||||||
|
from django.utils.six import StringIO
|
||||||
|
from django.utils.six.moves.urllib import parse as urllib
|
||||||
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
|
|
||||||
from quixote import cleanup, get_publisher
|
from quixote import cleanup, get_publisher
|
||||||
from wcs.qommon.http_request import HTTPRequest
|
from wcs.qommon.http_request import HTTPRequest
|
||||||
from wcs.qommon.form import PicklableUpload
|
from wcs.qommon.form import PicklableUpload
|
||||||
|
@ -50,7 +52,7 @@ def pub(request, emails):
|
||||||
pub.cfg['language'] = {'language': 'en'}
|
pub.cfg['language'] = {'language': 'en'}
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
|
|
||||||
file(os.path.join(pub.app_dir, 'site-options.cfg'), 'w').write('''\
|
open(os.path.join(pub.app_dir, 'site-options.cfg'), 'w').write('''\
|
||||||
[api-secrets]
|
[api-secrets]
|
||||||
coucou = 1234
|
coucou = 1234
|
||||||
''')
|
''')
|
||||||
|
@ -96,8 +98,8 @@ def sign_uri(uri, user=None, format='json'):
|
||||||
query += '&email=' + urllib.quote(user.email)
|
query += '&email=' + urllib.quote(user.email)
|
||||||
query += '&signature=%s' % urllib.quote(
|
query += '&signature=%s' % urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
query,
|
force_bytes(query),
|
||||||
hashlib.sha256).digest()))
|
hashlib.sha256).digest()))
|
||||||
return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
|
return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
|
||||||
|
|
||||||
|
@ -137,8 +139,8 @@ def test_get_user_from_api_query_string_error_invalid_signature(pub):
|
||||||
def test_get_user_from_api_query_string_error_missing_timestamp(pub):
|
def test_get_user_from_api_query_string_error_missing_timestamp(pub):
|
||||||
signature = urllib.quote(
|
signature = urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
'format=json&orig=coucou&algo=sha1',
|
b'format=json&orig=coucou&algo=sha1',
|
||||||
hashlib.sha1).digest()))
|
hashlib.sha1).digest()))
|
||||||
output = get_app(pub).get('/api/user/?format=json&orig=coucou&algo=sha1&signature=%s' % signature, status=403)
|
output = get_app(pub).get('/api/user/?format=json&orig=coucou&algo=sha1&signature=%s' % signature, status=403)
|
||||||
assert output.json['err_desc'] == 'missing/multiple timestamp field'
|
assert output.json['err_desc'] == 'missing/multiple timestamp field'
|
||||||
|
@ -148,8 +150,8 @@ def test_get_user_from_api_query_string_error_missing_email(pub):
|
||||||
query = 'format=json&orig=coucou&algo=sha1×tamp=' + timestamp
|
query = 'format=json&orig=coucou&algo=sha1×tamp=' + timestamp
|
||||||
signature = urllib.quote(
|
signature = urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
query,
|
force_bytes(query),
|
||||||
hashlib.sha1).digest()))
|
hashlib.sha1).digest()))
|
||||||
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature), status=403)
|
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature), status=403)
|
||||||
assert output.json['err_desc'] == 'no user specified'
|
assert output.json['err_desc'] == 'no user specified'
|
||||||
|
@ -159,8 +161,8 @@ def test_get_user_from_api_query_string_error_unknown_nameid(pub):
|
||||||
query = 'format=json&orig=coucou&algo=sha1&NameID=xxx×tamp=' + timestamp
|
query = 'format=json&orig=coucou&algo=sha1&NameID=xxx×tamp=' + timestamp
|
||||||
signature = urllib.quote(
|
signature = urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
query,
|
force_bytes(query),
|
||||||
hashlib.sha1).digest()))
|
hashlib.sha1).digest()))
|
||||||
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature), status=403)
|
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature), status=403)
|
||||||
assert output.json['err_desc'] == 'unknown NameID'
|
assert output.json['err_desc'] == 'unknown NameID'
|
||||||
|
@ -172,8 +174,8 @@ def test_get_user_from_api_query_string_error_missing_email_valid_endpoint(pub):
|
||||||
query = 'format=json&orig=coucou&algo=sha1×tamp=' + timestamp
|
query = 'format=json&orig=coucou&algo=sha1×tamp=' + timestamp
|
||||||
signature = urllib.quote(
|
signature = urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
query,
|
force_bytes(query),
|
||||||
hashlib.sha1).digest()))
|
hashlib.sha1).digest()))
|
||||||
output = get_app(pub).get('/categories?%s&signature=%s' % (query, signature))
|
output = get_app(pub).get('/categories?%s&signature=%s' % (query, signature))
|
||||||
assert output.json == {'data': []}
|
assert output.json == {'data': []}
|
||||||
|
@ -186,8 +188,8 @@ def test_get_user_from_api_query_string_error_unknown_nameid_valid_endpoint(pub)
|
||||||
query = 'format=json&NameID=xxx&orig=coucou&algo=sha1×tamp=' + timestamp
|
query = 'format=json&NameID=xxx&orig=coucou&algo=sha1×tamp=' + timestamp
|
||||||
signature = urllib.quote(
|
signature = urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
query,
|
force_bytes(query),
|
||||||
hashlib.sha1).digest()))
|
hashlib.sha1).digest()))
|
||||||
output = get_app(pub).get('/categories?%s&signature=%s' % (query, signature))
|
output = get_app(pub).get('/categories?%s&signature=%s' % (query, signature))
|
||||||
assert output.json == {'data': []}
|
assert output.json == {'data': []}
|
||||||
|
@ -199,8 +201,8 @@ def test_get_user_from_api_query_string_error_success_sha1(pub, local_user):
|
||||||
query = 'format=json&orig=coucou&algo=sha1&email=' + urllib.quote(local_user.email) + '×tamp=' + timestamp
|
query = 'format=json&orig=coucou&algo=sha1&email=' + urllib.quote(local_user.email) + '×tamp=' + timestamp
|
||||||
signature = urllib.quote(
|
signature = urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
query,
|
force_bytes(query),
|
||||||
hashlib.sha1).digest()))
|
hashlib.sha1).digest()))
|
||||||
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature))
|
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature))
|
||||||
assert output.json['user_display_name'] == u'Jean Darmette'
|
assert output.json['user_display_name'] == u'Jean Darmette'
|
||||||
|
@ -210,8 +212,8 @@ def test_get_user_from_api_query_string_error_invalid_signature_algo_mismatch(pu
|
||||||
query = 'format=json&orig=coucou&algo=sha256&email=' + urllib.quote(local_user.email) + '×tamp=' + timestamp
|
query = 'format=json&orig=coucou&algo=sha256&email=' + urllib.quote(local_user.email) + '×tamp=' + timestamp
|
||||||
signature = urllib.quote(
|
signature = urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
query,
|
force_bytes(query),
|
||||||
hashlib.sha1).digest()))
|
hashlib.sha1).digest()))
|
||||||
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature), status=403)
|
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature), status=403)
|
||||||
assert output.json['err_desc'] == 'invalid signature'
|
assert output.json['err_desc'] == 'invalid signature'
|
||||||
|
@ -221,8 +223,8 @@ def test_get_user_from_api_query_string_error_success_sha256(pub, local_user):
|
||||||
query = 'format=json&orig=coucou&algo=sha256&email=' + urllib.quote(local_user.email) + '×tamp=' + timestamp
|
query = 'format=json&orig=coucou&algo=sha256&email=' + urllib.quote(local_user.email) + '×tamp=' + timestamp
|
||||||
signature = urllib.quote(
|
signature = urllib.quote(
|
||||||
base64.b64encode(
|
base64.b64encode(
|
||||||
hmac.new('1234',
|
hmac.new(b'1234',
|
||||||
query,
|
force_bytes(query),
|
||||||
hashlib.sha256).digest()))
|
hashlib.sha256).digest()))
|
||||||
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature))
|
output = get_app(pub).get('/api/user/?%s&signature=%s' % (query, signature))
|
||||||
assert output.json['user_display_name'] == u'Jean Darmette'
|
assert output.json['user_display_name'] == u'Jean Darmette'
|
||||||
|
@ -329,7 +331,7 @@ def test_formdef_list(pub):
|
||||||
assert resp1.json['data'][0]['redirection'] == False
|
assert resp1.json['data'][0]['redirection'] == False
|
||||||
assert resp1.json['data'][0]['description'] == 'plop'
|
assert resp1.json['data'][0]['description'] == 'plop'
|
||||||
assert resp1.json['data'][0]['keywords'] == ['mobile', 'test']
|
assert resp1.json['data'][0]['keywords'] == ['mobile', 'test']
|
||||||
assert resp1.json['data'][0]['functions'].keys() == ['_receiver']
|
assert list(resp1.json['data'][0]['functions'].keys()) == ['_receiver']
|
||||||
assert resp1.json['data'][0]['functions']['_receiver']['label'] == 'Recipient'
|
assert resp1.json['data'][0]['functions']['_receiver']['label'] == 'Recipient'
|
||||||
assert resp1.json['data'][0]['functions']['_receiver']['role']['slug'] == role.slug
|
assert resp1.json['data'][0]['functions']['_receiver']['role']['slug'] == role.slug
|
||||||
assert resp1.json['data'][0]['functions']['_receiver']['role']['name'] == role.name
|
assert resp1.json['data'][0]['functions']['_receiver']['role']['name'] == role.name
|
||||||
|
@ -781,7 +783,7 @@ def test_formdef_submit_with_varname(pub, local_user):
|
||||||
'date': '1970-01-01',
|
'date': '1970-01-01',
|
||||||
'file': {
|
'file': {
|
||||||
'filename': 'test.txt',
|
'filename': 'test.txt',
|
||||||
'content': base64.b64encode('test'),
|
'content': force_text(base64.b64encode(b'test')),
|
||||||
},
|
},
|
||||||
'map': {
|
'map': {
|
||||||
'lat': 1.5,
|
'lat': 1.5,
|
||||||
|
@ -803,7 +805,7 @@ def test_formdef_submit_with_varname(pub, local_user):
|
||||||
(1970, 1, 1, 0, 0, 0, 3, 1, -1))
|
(1970, 1, 1, 0, 0, 0, 3, 1, -1))
|
||||||
|
|
||||||
assert data_class.get(resp.json['data']['id']).data['4'].orig_filename == 'test.txt'
|
assert data_class.get(resp.json['data']['id']).data['4'].orig_filename == 'test.txt'
|
||||||
assert data_class.get(resp.json['data']['id']).data['4'].get_content() == 'test'
|
assert data_class.get(resp.json['data']['id']).data['4'].get_content() == b'test'
|
||||||
assert data_class.get(resp.json['data']['id']).data['5'] == '1.5;2.25'
|
assert data_class.get(resp.json['data']['id']).data['5'] == '1.5;2.25'
|
||||||
# test bijectivity
|
# test bijectivity
|
||||||
assert (formdef.fields[3].get_json_value(data_class.get(resp.json['data']['id']).data['3']) ==
|
assert (formdef.fields[3].get_json_value(data_class.get(resp.json['data']['id']).data['3']) ==
|
||||||
|
@ -835,7 +837,7 @@ def test_formdef_submit_from_wscall(pub, local_user):
|
||||||
formdata.just_created()
|
formdata.just_created()
|
||||||
|
|
||||||
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['test'])
|
upload.receive([b'test'])
|
||||||
|
|
||||||
formdata.data = {
|
formdata.data = {
|
||||||
'0': 'xxx',
|
'0': 'xxx',
|
||||||
|
@ -859,8 +861,7 @@ def test_formdef_submit_from_wscall(pub, local_user):
|
||||||
|
|
||||||
payload = json.loads(
|
payload = json.loads(
|
||||||
json.dumps(formdata.get_json_export_dict(),
|
json.dumps(formdata.get_json_export_dict(),
|
||||||
cls=qommon.misc.JSONEncoder,
|
cls=qommon.misc.JSONEncoder))
|
||||||
encoding=get_publisher().site_charset))
|
|
||||||
signed_url = sign_url('http://example.net/api/formdefs/test/submit?orig=coucou', '1234')
|
signed_url = sign_url('http://example.net/api/formdefs/test/submit?orig=coucou', '1234')
|
||||||
url = signed_url[len('http://example.net'):]
|
url = signed_url[len('http://example.net'):]
|
||||||
|
|
||||||
|
@ -896,8 +897,7 @@ def test_formdef_submit_from_wscall(pub, local_user):
|
||||||
|
|
||||||
payload = json.loads(
|
payload = json.loads(
|
||||||
json.dumps(formdata.get_json_export_dict(),
|
json.dumps(formdata.get_json_export_dict(),
|
||||||
cls=qommon.misc.JSONEncoder,
|
cls=qommon.misc.JSONEncoder))
|
||||||
encoding=get_publisher().site_charset))
|
|
||||||
signed_url = sign_url('http://example.net/api/formdefs/test/submit?orig=coucou', '1234')
|
signed_url = sign_url('http://example.net/api/formdefs/test/submit?orig=coucou', '1234')
|
||||||
url = signed_url[len('http://example.net'):]
|
url = signed_url[len('http://example.net'):]
|
||||||
|
|
||||||
|
@ -911,8 +911,7 @@ def test_formdef_submit_from_wscall(pub, local_user):
|
||||||
|
|
||||||
payload = json.loads(
|
payload = json.loads(
|
||||||
json.dumps(formdata.get_json_export_dict(),
|
json.dumps(formdata.get_json_export_dict(),
|
||||||
cls=qommon.misc.JSONEncoder,
|
cls=qommon.misc.JSONEncoder))
|
||||||
encoding=get_publisher().site_charset))
|
|
||||||
signed_url = sign_url('http://example.net/api/formdefs/test/submit?orig=coucou', '1234')
|
signed_url = sign_url('http://example.net/api/formdefs/test/submit?orig=coucou', '1234')
|
||||||
url = signed_url[len('http://example.net'):]
|
url = signed_url[len('http://example.net'):]
|
||||||
|
|
||||||
|
@ -1130,7 +1129,7 @@ def test_formdata(pub, local_user):
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
date = time.strptime('2014-01-20', '%Y-%m-%d')
|
date = time.strptime('2014-01-20', '%Y-%m-%d')
|
||||||
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['base64me'])
|
upload.receive([b'base64me'])
|
||||||
formdata.data = {
|
formdata.data = {
|
||||||
'0': 'foo@localhost',
|
'0': 'foo@localhost',
|
||||||
'1': 'xxx',
|
'1': 'xxx',
|
||||||
|
@ -1284,9 +1283,9 @@ def test_formdata_with_workflow_data(pub, local_user):
|
||||||
|
|
||||||
from wcs.qommon.form import PicklableUpload as PicklableUpload3
|
from wcs.qommon.form import PicklableUpload as PicklableUpload3
|
||||||
upload = PicklableUpload3('test.txt', 'text/plain', 'ascii')
|
upload = PicklableUpload3('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['test'])
|
upload.receive([b'test'])
|
||||||
upload2 = PicklableUpload3('test.txt', 'text/plain', 'ascii')
|
upload2 = PicklableUpload3('test.txt', 'text/plain', 'ascii')
|
||||||
upload2.receive(['test'])
|
upload2.receive([b'test'])
|
||||||
formdata.workflow_data = {'blah': upload, 'blah2': upload2, 'xxx': 23}
|
formdata.workflow_data = {'blah': upload, 'blah2': upload2, 'xxx': 23}
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
|
||||||
|
@ -1295,8 +1294,8 @@ def test_formdata_with_workflow_data(pub, local_user):
|
||||||
assert resp.json['workflow']['data']['xxx'] == 23
|
assert resp.json['workflow']['data']['xxx'] == 23
|
||||||
assert resp.json['workflow']['data']['blah']['filename'] == 'test.txt'
|
assert resp.json['workflow']['data']['blah']['filename'] == 'test.txt'
|
||||||
assert resp.json['workflow']['data']['blah']['content_type'] == 'text/plain'
|
assert resp.json['workflow']['data']['blah']['content_type'] == 'text/plain'
|
||||||
assert base64.decodestring(resp.json['workflow']['data']['blah']['content']) == 'test'
|
assert base64.decodestring(force_bytes(resp.json['workflow']['data']['blah']['content'])) == b'test'
|
||||||
assert base64.decodestring(resp.json['workflow']['data']['blah2']['content']) == 'test'
|
assert base64.decodestring(force_bytes(resp.json['workflow']['data']['blah2']['content'])) == b'test'
|
||||||
|
|
||||||
def test_user_by_nameid(pub, local_user):
|
def test_user_by_nameid(pub, local_user):
|
||||||
resp = get_app(pub).get(sign_uri('/api/users/xyz/', user=local_user),
|
resp = get_app(pub).get(sign_uri('/api/users/xyz/', user=local_user),
|
||||||
|
@ -1536,7 +1535,7 @@ def test_user_drafts(pub, local_user):
|
||||||
|
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['base64me'])
|
upload.receive([b'base64me'])
|
||||||
formdata.data = {'0': 'foo@localhost', '1': 'xxx', '2': upload}
|
formdata.data = {'0': 'foo@localhost', '1': 'xxx', '2': upload}
|
||||||
formdata.user_id = local_user.id
|
formdata.user_id = local_user.id
|
||||||
formdata.page_no = 1
|
formdata.page_no = 1
|
||||||
|
@ -1602,7 +1601,7 @@ def test_api_list_formdata(pub, local_user):
|
||||||
formdata = data_class()
|
formdata = data_class()
|
||||||
date = time.strptime('2014-01-20', '%Y-%m-%d')
|
date = time.strptime('2014-01-20', '%Y-%m-%d')
|
||||||
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['base64me'])
|
upload.receive([b'base64me'])
|
||||||
formdata.data = {'0': 'FOO BAR %d' % i, '2': upload}
|
formdata.data = {'0': 'FOO BAR %d' % i, '2': upload}
|
||||||
formdata.user_id = local_user.id
|
formdata.user_id = local_user.id
|
||||||
if i%4 == 0:
|
if i%4 == 0:
|
||||||
|
@ -1723,7 +1722,7 @@ def test_api_anonymized_formdata(pub, local_user, admin_user):
|
||||||
formdata = data_class()
|
formdata = data_class()
|
||||||
date = time.strptime('2014-01-20', '%Y-%m-%d')
|
date = time.strptime('2014-01-20', '%Y-%m-%d')
|
||||||
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['base64me'])
|
upload.receive([b'base64me'])
|
||||||
formdata.data = {'0': 'FOO BAR %d' % i, '2': upload}
|
formdata.data = {'0': 'FOO BAR %d' % i, '2': upload}
|
||||||
formdata.user_id = local_user.id
|
formdata.user_id = local_user.id
|
||||||
if i%4 == 0:
|
if i%4 == 0:
|
||||||
|
@ -1830,7 +1829,7 @@ def test_api_geojson_formdata(pub, local_user):
|
||||||
resp = get_app(pub).get(sign_uri('/api/forms/test/geojson?anonymise', user=local_user), status=403)
|
resp = get_app(pub).get(sign_uri('/api/forms/test/geojson?anonymise', user=local_user), status=403)
|
||||||
|
|
||||||
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['base64me'])
|
upload.receive([b'base64me'])
|
||||||
|
|
||||||
foobar = '<font color="red">FOO BAR</font>'
|
foobar = '<font color="red">FOO BAR</font>'
|
||||||
username = '<font color="red">Jean Darmette</font>'
|
username = '<font color="red">Jean Darmette</font>'
|
||||||
|
@ -2107,7 +2106,7 @@ def ics_data(local_user):
|
||||||
FormDef.wipe()
|
FormDef.wipe()
|
||||||
formdef = FormDef()
|
formdef = FormDef()
|
||||||
formdef.url_name = 'test'
|
formdef.url_name = 'test'
|
||||||
formdef.name = 'test\xc3\xa9'
|
formdef.name = 'testé'
|
||||||
formdef.workflow_roles = {'_receiver': role.id}
|
formdef.workflow_roles = {'_receiver': role.id}
|
||||||
formdef.fields = [
|
formdef.fields = [
|
||||||
fields.StringField(id='0', label='foobar', varname='foobar'),
|
fields.StringField(id='0', label='foobar', varname='foobar'),
|
||||||
|
@ -2151,24 +2150,24 @@ def test_api_ics_formdata(pub, local_user, ics_data):
|
||||||
# check it gets the data
|
# check it gets the data
|
||||||
resp = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar', user=local_user))
|
resp = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar', user=local_user))
|
||||||
resp2 = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar/', user=local_user))
|
resp2 = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar/', user=local_user))
|
||||||
assert remove_dtstamp(resp.body) == remove_dtstamp(resp2.body)
|
assert remove_dtstamp(resp.text) == remove_dtstamp(resp2.text)
|
||||||
assert resp.headers['content-type'] == 'text/calendar; charset=utf-8'
|
assert resp.headers['content-type'] == 'text/calendar; charset=utf-8'
|
||||||
assert resp.body.count('BEGIN:VEVENT') == 10
|
assert resp.text.count('BEGIN:VEVENT') == 10
|
||||||
# check that description contains form name, display id, workflow status,
|
# check that description contains form name, display id, workflow status,
|
||||||
# backoffice url and attached user
|
# backoffice url and attached user
|
||||||
pattern = re.compile('DESCRIPTION:testé \| 1-\d+ \| New', re.MULTILINE)
|
pattern = re.compile(u'DESCRIPTION:testé \| 1-\d+ \| New', re.MULTILINE)
|
||||||
m = pattern.findall(resp.body)
|
m = pattern.findall(resp.text)
|
||||||
assert len(m) == 10
|
assert len(m) == 10
|
||||||
assert resp.body.count('Jean Darmette') == 10
|
assert resp.text.count('Jean Darmette') == 10
|
||||||
assert resp.body.count('DTSTART') == 10
|
assert resp.text.count('DTSTART') == 10
|
||||||
|
|
||||||
# check with a filter
|
# check with a filter
|
||||||
resp = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar?filter=done', user=local_user))
|
resp = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar?filter=done', user=local_user))
|
||||||
assert resp.body.count('BEGIN:VEVENT') == 20
|
assert resp.text.count('BEGIN:VEVENT') == 20
|
||||||
pattern = re.compile('DESCRIPTION:testé \| 1-\d+ \| Finished', re.MULTILINE)
|
pattern = re.compile(u'DESCRIPTION:testé \| 1-\d+ \| Finished', re.MULTILINE)
|
||||||
m = pattern.findall(resp.body)
|
m = pattern.findall(resp.text)
|
||||||
assert len(m) == 20
|
assert len(m) == 20
|
||||||
assert resp.body.count('Jean Darmette') == 20
|
assert resp.text.count('Jean Darmette') == 20
|
||||||
|
|
||||||
# check 404 on erroneous field var
|
# check 404 on erroneous field var
|
||||||
resp = get_app(pub).get(sign_uri('/api/forms/test/ics/xxx', user=local_user), status=404)
|
resp = get_app(pub).get(sign_uri('/api/forms/test/ics/xxx', user=local_user), status=404)
|
||||||
|
@ -2182,9 +2181,9 @@ def test_api_ics_formdata(pub, local_user, ics_data):
|
||||||
# check ics data with start and end varnames
|
# check ics data with start and end varnames
|
||||||
resp = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar/foobar2', user=local_user))
|
resp = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar/foobar2', user=local_user))
|
||||||
resp2 = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar/foobar2/', user=local_user))
|
resp2 = get_app(pub).get(sign_uri('/api/forms/test/ics/foobar/foobar2/', user=local_user))
|
||||||
assert remove_dtstamp(resp.body) == remove_dtstamp(resp2.body)
|
assert remove_dtstamp(resp.text) == remove_dtstamp(resp2.text)
|
||||||
assert resp.body.count('DTSTART') == 10
|
assert resp.text.count('DTSTART') == 10
|
||||||
assert resp.body.count('DTEND') == 10
|
assert resp.text.count('DTEND') == 10
|
||||||
|
|
||||||
def test_api_ics_formdata_http_auth(pub, local_user, ics_data):
|
def test_api_ics_formdata_http_auth(pub, local_user, ics_data):
|
||||||
role = Role.select()[0]
|
role = Role.select()[0]
|
||||||
|
@ -2210,7 +2209,7 @@ def test_api_ics_formdata_http_auth(pub, local_user, ics_data):
|
||||||
# check it gets the data
|
# check it gets the data
|
||||||
resp = app.get('/api/forms/test/ics/foobar?email=%s' % local_user.email, status=200)
|
resp = app.get('/api/forms/test/ics/foobar?email=%s' % local_user.email, status=200)
|
||||||
assert resp.headers['content-type'] == 'text/calendar; charset=utf-8'
|
assert resp.headers['content-type'] == 'text/calendar; charset=utf-8'
|
||||||
assert resp.body.count('BEGIN:VEVENT') == 10
|
assert resp.text.count('BEGIN:VEVENT') == 10
|
||||||
|
|
||||||
# check it fails with a different password
|
# check it fails with a different password
|
||||||
app.authorization = ('Basic', ('user', 'password2'))
|
app.authorization = ('Basic', ('user', 'password2'))
|
||||||
|
@ -2599,7 +2598,7 @@ def test_reverse_geocoding(pub):
|
||||||
get_app(pub).get('/api/reverse-geocoding', status=400)
|
get_app(pub).get('/api/reverse-geocoding', status=400)
|
||||||
resp = get_app(pub).get('/api/reverse-geocoding?lat=0&lon=0')
|
resp = get_app(pub).get('/api/reverse-geocoding?lat=0&lon=0')
|
||||||
assert resp.content_type == 'application/json'
|
assert resp.content_type == 'application/json'
|
||||||
assert resp.body == json.dumps({'address': 'xxx'})
|
assert resp.text == json.dumps({'address': 'xxx'})
|
||||||
assert urlopen.call_args[0][0] == 'https://nominatim.entrouvert.org/reverse?zoom=18&format=json&addressdetails=1&lat=0&lon=0&accept-language=en'
|
assert urlopen.call_args[0][0] == 'https://nominatim.entrouvert.org/reverse?zoom=18&format=json&addressdetails=1&lat=0&lon=0&accept-language=en'
|
||||||
|
|
||||||
pub.site_options.add_section('options')
|
pub.site_options.add_section('options')
|
||||||
|
@ -2686,7 +2685,7 @@ def test_geocoding(pub):
|
||||||
get_app(pub).get('/api/geocoding', status=400)
|
get_app(pub).get('/api/geocoding', status=400)
|
||||||
resp = get_app(pub).get('/api/geocoding?q=test')
|
resp = get_app(pub).get('/api/geocoding?q=test')
|
||||||
assert resp.content_type == 'application/json'
|
assert resp.content_type == 'application/json'
|
||||||
assert resp.body == json.dumps([{'lat': 0, 'lon': 0}])
|
assert resp.text == json.dumps([{'lat': 0, 'lon': 0}])
|
||||||
assert urlopen.call_args[0][0] == 'https://nominatim.entrouvert.org/search?format=json&q=test&accept-language=en'
|
assert urlopen.call_args[0][0] == 'https://nominatim.entrouvert.org/search?format=json&q=test&accept-language=en'
|
||||||
|
|
||||||
pub.site_options.add_section('options')
|
pub.site_options.add_section('options')
|
||||||
|
|
|
@ -23,7 +23,7 @@ def pub(request):
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
req = HTTPRequest(None, {'SERVER_NAME': 'example.net', 'SCRIPT_NAME': ''})
|
req = HTTPRequest(None, {'SERVER_NAME': 'example.net', 'SCRIPT_NAME': ''})
|
||||||
req.response.filter = {}
|
req.response.filter = {}
|
||||||
req.user = None
|
req._user = None
|
||||||
pub._set_request(req)
|
pub._set_request(req)
|
||||||
req.session = sessions.BasicSession(id=1)
|
req.session = sessions.BasicSession(id=1)
|
||||||
pub.set_config(req)
|
pub.set_config(req)
|
||||||
|
|
|
@ -38,16 +38,16 @@ def test_login_logout(pub):
|
||||||
resp = login(get_app(pub), username='foo', password='foo').get('/')
|
resp = login(get_app(pub), username='foo', password='foo').get('/')
|
||||||
resp = resp.click('Logout')
|
resp = resp.click('Logout')
|
||||||
resp = resp.follow()
|
resp = resp.follow()
|
||||||
assert resp.body == resp_initial.body
|
assert resp.text == resp_initial.text
|
||||||
|
|
||||||
def test_register_account(pub):
|
def test_register_account(pub):
|
||||||
resp = get_app(pub).get('/').click('Login').follow()
|
resp = get_app(pub).get('/').click('Login').follow()
|
||||||
assert not 'register' in resp.body
|
assert not 'register' in resp.text
|
||||||
|
|
||||||
pub.cfg['identities'] = {'creation': 'self'}
|
pub.cfg['identities'] = {'creation': 'self'}
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
resp = get_app(pub).get('/').click('Login').follow()
|
resp = get_app(pub).get('/').click('Login').follow()
|
||||||
assert 'register'in resp.body
|
assert 'register'in resp.text
|
||||||
resp = resp.click('New Account page')
|
resp = resp.click('New Account page')
|
||||||
resp.form['username'] = 'foobar'
|
resp.form['username'] = 'foobar'
|
||||||
assert resp.form.submit().location == 'http://example.net/login/'
|
assert resp.form.submit().location == 'http://example.net/login/'
|
||||||
|
@ -61,12 +61,12 @@ def test_login_2auth(pub_2auth):
|
||||||
resp.form['username'] = 'foo'
|
resp.form['username'] = 'foo'
|
||||||
resp.form['password'] = 'foo'
|
resp.form['password'] = 'foo'
|
||||||
resp = resp.form.submit().follow()
|
resp = resp.form.submit().follow()
|
||||||
assert '/logout' in resp.body
|
assert '/logout' in resp.text
|
||||||
|
|
||||||
resp = get_app(pub_2auth).get('/').click('Login').follow()
|
resp = get_app(pub_2auth).get('/').click('Login').follow()
|
||||||
resp.form['method'] = 'SAML identity provider'
|
resp.form['method'] = 'SAML identity provider'
|
||||||
resp = resp.form.submit().follow()
|
resp = resp.form.submit().follow()
|
||||||
assert 'SSO support is not yet configured' in resp.body
|
assert 'SSO support is not yet configured' in resp.text
|
||||||
|
|
||||||
def test_register_2auth(pub_2auth):
|
def test_register_2auth(pub_2auth):
|
||||||
pub_2auth.cfg['identities'] = {'creation': 'self'}
|
pub_2auth.cfg['identities'] = {'creation': 'self'}
|
||||||
|
@ -74,7 +74,7 @@ def test_register_2auth(pub_2auth):
|
||||||
resp = get_app(pub_2auth).get('/register/')
|
resp = get_app(pub_2auth).get('/register/')
|
||||||
resp.form['method'] = 'Username / password'
|
resp.form['method'] = 'Username / password'
|
||||||
resp = resp.form.submit().follow()
|
resp = resp.form.submit().follow()
|
||||||
assert 'New Account' in resp.body
|
assert 'New Account' in resp.text
|
||||||
|
|
||||||
resp = get_app(pub_2auth).get('/register/')
|
resp = get_app(pub_2auth).get('/register/')
|
||||||
resp.form['method'] = 'SAML identity provider'
|
resp.form['method'] = 'SAML identity provider'
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,22 +0,0 @@
|
||||||
import email
|
|
||||||
import StringIO
|
|
||||||
|
|
||||||
from wcs.ctl.process_bounce import CmdProcessBounce
|
|
||||||
|
|
||||||
def test_normal_email():
|
|
||||||
msg = email.message_from_string('test')
|
|
||||||
msg['From'] = 'bar@example.net'
|
|
||||||
msg['To'] = 'foo@example.net'
|
|
||||||
addrs = CmdProcessBounce.get_bounce_addrs(msg)
|
|
||||||
assert addrs is None
|
|
||||||
|
|
||||||
def test_bounce_email():
|
|
||||||
msg = email.message_from_string('test')
|
|
||||||
msg['From'] = 'bar@example.net'
|
|
||||||
msg['To'] = 'foo@example.net'
|
|
||||||
|
|
||||||
# this is how exim adds failed recipients in its outgoing bounce emails
|
|
||||||
msg['x-failed-recipients'] = 'baz@example.net'
|
|
||||||
|
|
||||||
addrs = CmdProcessBounce.get_bounce_addrs(msg)
|
|
||||||
assert addrs == ['baz@example.net']
|
|
|
@ -1,10 +1,10 @@
|
||||||
import os
|
import os
|
||||||
import pickle
|
import pickle
|
||||||
import shutil
|
import shutil
|
||||||
from cStringIO import StringIO
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
from django.utils.six import BytesIO
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
from wcs import publisher
|
from wcs import publisher
|
||||||
|
|
||||||
|
@ -73,9 +73,9 @@ def test_xml_export():
|
||||||
test.store()
|
test.store()
|
||||||
test = Category.get(1)
|
test = Category.get(1)
|
||||||
|
|
||||||
assert '<name>Test</name>' in test.export_to_xml_string(include_id=True)
|
assert b'<name>Test</name>' in test.export_to_xml_string(include_id=True)
|
||||||
assert ' id="1"' in test.export_to_xml_string(include_id=True)
|
assert b' id="1"' in test.export_to_xml_string(include_id=True)
|
||||||
assert ' id="1"' not in test.export_to_xml_string(include_id=False)
|
assert b' id="1"' not in test.export_to_xml_string(include_id=False)
|
||||||
|
|
||||||
|
|
||||||
def test_xml_import():
|
def test_xml_import():
|
||||||
|
@ -86,7 +86,7 @@ def test_xml_import():
|
||||||
test.store()
|
test.store()
|
||||||
test = Category.get(1)
|
test = Category.get(1)
|
||||||
|
|
||||||
fd = StringIO(test.export_to_xml_string(include_id=True))
|
fd = BytesIO(test.export_to_xml_string(include_id=True))
|
||||||
test2 = Category.import_from_xml(fd, include_id=True)
|
test2 = Category.import_from_xml(fd, include_id=True)
|
||||||
assert test.id == test2.id
|
assert test.id == test2.id
|
||||||
assert test.name == test2.name
|
assert test.name == test2.name
|
||||||
|
@ -102,7 +102,7 @@ def test_load_old_pickle():
|
||||||
test.description = 'Hello world'
|
test.description = 'Hello world'
|
||||||
|
|
||||||
os.mkdir(os.path.join(pub.app_dir, 'categories'))
|
os.mkdir(os.path.join(pub.app_dir, 'categories'))
|
||||||
fd = file(os.path.join(pub.app_dir, 'categories', '1'), 'w')
|
fd = open(os.path.join(pub.app_dir, 'categories', '1'), 'wb')
|
||||||
pickle.dump(test, fd)
|
pickle.dump(test, fd)
|
||||||
fd.close()
|
fd.close()
|
||||||
|
|
||||||
|
|
|
@ -68,27 +68,27 @@ def test_command_exists():
|
||||||
def test_unknown_publisher_fails(pub):
|
def test_unknown_publisher_fails(pub):
|
||||||
with pytest.raises(CommandError) as excinfo:
|
with pytest.raises(CommandError) as excinfo:
|
||||||
call_command('convert_to_sql', '-d', 'unknown.net', '--database', 'foobar')
|
call_command('convert_to_sql', '-d', 'unknown.net', '--database', 'foobar')
|
||||||
assert excinfo.value.message == 'unknown tenant'
|
assert str(excinfo.value) == 'unknown tenant'
|
||||||
|
|
||||||
|
|
||||||
def test_failing_connection(pub):
|
def test_failing_connection(pub):
|
||||||
with pytest.raises(psycopg2.OperationalError) as excinfo:
|
with pytest.raises(psycopg2.OperationalError) as excinfo:
|
||||||
call_command('convert_to_sql', '-d', 'example.net', '--database', 'foobar', '--port', '666')
|
call_command('convert_to_sql', '-d', 'example.net', '--database', 'foobar', '--port', '666')
|
||||||
assert 'could not connect' in excinfo.value.message
|
assert 'could not connect' in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
def test_database_does_not_exist(pub):
|
def test_database_does_not_exist(pub):
|
||||||
new_database = 'test_%s' % random.randint(1000, 9999)
|
new_database = 'test_%s' % random.randint(1000, 9999)
|
||||||
with pytest.raises(psycopg2.OperationalError) as excinfo:
|
with pytest.raises(psycopg2.OperationalError) as excinfo:
|
||||||
call_command('convert_to_sql', '-d', 'example.net', '--database', new_database)
|
call_command('convert_to_sql', '-d', 'example.net', '--database', new_database)
|
||||||
assert 'exist' in excinfo.value.message # works for english + french postgresql
|
assert 'exist' in str(excinfo.value) # works for english + french postgresql
|
||||||
|
|
||||||
|
|
||||||
def test_already_migrated_fails():
|
def test_already_migrated_fails():
|
||||||
pub = create_temporary_pub(sql_mode=True)
|
pub = create_temporary_pub(sql_mode=True)
|
||||||
with pytest.raises(CommandError) as excinfo:
|
with pytest.raises(CommandError) as excinfo:
|
||||||
call_command('convert_to_sql', '-d', 'example.net', '--database', 'foobar')
|
call_command('convert_to_sql', '-d', 'example.net', '--database', 'foobar')
|
||||||
assert excinfo.value.message == 'tenant already using postgresql'
|
assert str(excinfo.value) == 'tenant already using postgresql'
|
||||||
cleanup_connection()
|
cleanup_connection()
|
||||||
force_connections_close()
|
force_connections_close()
|
||||||
clean_temporary_pub()
|
clean_temporary_pub()
|
||||||
|
|
|
@ -16,7 +16,6 @@ from wcs.qommon.management.commands.collectstatic import Command as CmdCollectSt
|
||||||
from wcs.qommon.management.commands.migrate import Command as CmdMigrate
|
from wcs.qommon.management.commands.migrate import Command as CmdMigrate
|
||||||
from wcs.qommon.management.commands.migrate_schemas import Command as CmdMigrateSchemas
|
from wcs.qommon.management.commands.migrate_schemas import Command as CmdMigrateSchemas
|
||||||
from wcs.qommon.management.commands.makemessages import Command as CmdMakeMessages
|
from wcs.qommon.management.commands.makemessages import Command as CmdMakeMessages
|
||||||
from wcs.ctl.process_bounce import CmdProcessBounce
|
|
||||||
from wcs.ctl.rebuild_indexes import rebuild_vhost_indexes
|
from wcs.ctl.rebuild_indexes import rebuild_vhost_indexes
|
||||||
from wcs.ctl.wipe_data import CmdWipeData
|
from wcs.ctl.wipe_data import CmdWipeData
|
||||||
from wcs.ctl.management.commands.runscript import Command as CmdRunScript
|
from wcs.ctl.management.commands.runscript import Command as CmdRunScript
|
||||||
|
@ -67,24 +66,6 @@ def test_migrate(two_pubs):
|
||||||
def test_migrate_schemas(two_pubs):
|
def test_migrate_schemas(two_pubs):
|
||||||
CmdMigrateSchemas().handle()
|
CmdMigrateSchemas().handle()
|
||||||
|
|
||||||
def test_get_bounce_addrs():
|
|
||||||
msg = MIMEText('Hello world')
|
|
||||||
assert CmdProcessBounce.get_bounce_addrs(msg) is None
|
|
||||||
|
|
||||||
msg = MIMEMultipart(_subtype='mixed')
|
|
||||||
msg.attach(MIMEText('Hello world'))
|
|
||||||
msg.attach(MIMEText('<p>Hello world</p>', _subtype='html'))
|
|
||||||
assert CmdProcessBounce.get_bounce_addrs(msg) is None
|
|
||||||
|
|
||||||
msg = MIMEText('Hello world')
|
|
||||||
msg['x-failed-recipients'] = 'foobar@localhost'
|
|
||||||
assert CmdProcessBounce.get_bounce_addrs(msg) == ['foobar@localhost']
|
|
||||||
|
|
||||||
msg = MIMEText('''failed addresses follow:
|
|
||||||
foobar@localhost
|
|
||||||
message text follows:''')
|
|
||||||
assert CmdProcessBounce.get_bounce_addrs(msg) == ['foobar@localhost']
|
|
||||||
|
|
||||||
def test_wipe_formdata(pub):
|
def test_wipe_formdata(pub):
|
||||||
form_1 = FormDef()
|
form_1 = FormDef()
|
||||||
form_1.name = 'example'
|
form_1.name = 'example'
|
||||||
|
|
|
@ -1,11 +1,15 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
from cStringIO import StringIO
|
|
||||||
|
import codecs
|
||||||
import pytest
|
import pytest
|
||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
import shutil
|
import shutil
|
||||||
import urlparse
|
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.six import StringIO
|
||||||
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
|
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
from wcs import publisher
|
from wcs import publisher
|
||||||
|
@ -51,6 +55,7 @@ def requests_pub(request):
|
||||||
|
|
||||||
def test_item_field_python_datasource(requests_pub):
|
def test_item_field_python_datasource(requests_pub):
|
||||||
req = get_request()
|
req = get_request()
|
||||||
|
req.environ['REQUEST_METHOD'] = 'POST'
|
||||||
field = fields.ItemField()
|
field = fields.ItemField()
|
||||||
field.id = 1
|
field.id = 1
|
||||||
field.data_source = {
|
field.data_source = {
|
||||||
|
@ -144,8 +149,8 @@ def test_json_datasource(http_requests):
|
||||||
|
|
||||||
# invalid json file
|
# invalid json file
|
||||||
get_request().datasources_cache = {}
|
get_request().datasources_cache = {}
|
||||||
json_file = open(json_file_path, 'w')
|
json_file = open(json_file_path, 'wb')
|
||||||
json_file.write(u'foobar'.encode('zlib_codec'))
|
json_file.write(codecs.encode(b'foobar', 'zlib_codec'))
|
||||||
json_file.close()
|
json_file.close()
|
||||||
assert data_sources.get_items(datasource) == []
|
assert data_sources.get_items(datasource) == []
|
||||||
|
|
||||||
|
@ -278,7 +283,10 @@ def test_json_datasource_bad_url(http_requests, caplog):
|
||||||
datasource = {'type': 'json', 'value': 'http://remote.example.net/xml'}
|
datasource = {'type': 'json', 'value': 'http://remote.example.net/xml'}
|
||||||
assert data_sources.get_items(datasource) == []
|
assert data_sources.get_items(datasource) == []
|
||||||
assert 'Error reading JSON data source output' in caplog.records[-1].message
|
assert 'Error reading JSON data source output' in caplog.records[-1].message
|
||||||
assert 'No JSON object could be decoded' in caplog.records[-1].message
|
if six.PY2:
|
||||||
|
assert 'No JSON object could be decoded' in caplog.records[-1].message
|
||||||
|
else:
|
||||||
|
assert 'Expecting value:' in caplog.records[-1].message
|
||||||
|
|
||||||
datasource = {'type': 'json', 'value': 'http://remote.example.net/connection-error'}
|
datasource = {'type': 'json', 'value': 'http://remote.example.net/connection-error'}
|
||||||
assert data_sources.get_items(datasource) == []
|
assert data_sources.get_items(datasource) == []
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
import datetime
|
import datetime
|
||||||
import pytest
|
import pytest
|
||||||
import os
|
import os
|
||||||
from StringIO import StringIO
|
|
||||||
|
|
||||||
|
from django.utils.six import StringIO
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
from wcs.qommon.ezt import (Template, UnclosedBlocksError, UnmatchedEndError,
|
from wcs.qommon.ezt import (Template, UnclosedBlocksError, UnmatchedEndError,
|
||||||
UnmatchedElseError, ArgCountSyntaxError, _re_parse)
|
UnmatchedElseError, ArgCountSyntaxError, _re_parse)
|
||||||
|
|
|
@ -1,8 +1,9 @@
|
||||||
import urlparse
|
|
||||||
import base64
|
import base64
|
||||||
import json
|
import json
|
||||||
import urllib
|
|
||||||
|
|
||||||
|
from django.utils.encoding import force_bytes, force_text
|
||||||
|
from django.utils.six.moves.urllib import parse as urllib
|
||||||
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
from quixote import cleanup, get_session_manager
|
from quixote import cleanup, get_session_manager
|
||||||
|
|
||||||
from utilities import get_app, create_temporary_pub
|
from utilities import get_app, create_temporary_pub
|
||||||
|
@ -48,7 +49,7 @@ PROFILE = {
|
||||||
|
|
||||||
|
|
||||||
def base64url_encode(v):
|
def base64url_encode(v):
|
||||||
return base64.urlsafe_b64encode(v).strip('=')
|
return base64.urlsafe_b64encode(force_bytes(v)).strip(b'=')
|
||||||
|
|
||||||
|
|
||||||
def setup_module(module):
|
def setup_module(module):
|
||||||
|
@ -109,7 +110,7 @@ def setup_fc_environment(pub):
|
||||||
|
|
||||||
def get_session(app):
|
def get_session(app):
|
||||||
try:
|
try:
|
||||||
session_id = app.cookies.values()[0]
|
session_id = list(app.cookies.values())[0]
|
||||||
except IndexError:
|
except IndexError:
|
||||||
return None
|
return None
|
||||||
else:
|
else:
|
||||||
|
@ -134,7 +135,7 @@ def test_fc_login_page(caplog):
|
||||||
}
|
}
|
||||||
token_result = {
|
token_result = {
|
||||||
'access_token': 'abcd',
|
'access_token': 'abcd',
|
||||||
'id_token': '.%s.' % base64url_encode(json.dumps(id_token)),
|
'id_token': '.%s.' % force_text(base64url_encode(json.dumps(id_token))),
|
||||||
}
|
}
|
||||||
user_info_result = {
|
user_info_result = {
|
||||||
'sub': 'ymca',
|
'sub': 'ymca',
|
||||||
|
@ -195,7 +196,7 @@ def test_fc_login_page(caplog):
|
||||||
qs = urlparse.parse_qs(resp.location.split('?')[1])
|
qs = urlparse.parse_qs(resp.location.split('?')[1])
|
||||||
state = qs['state'][0]
|
state = qs['state'][0]
|
||||||
id_token['nonce'] = qs['nonce'][0]
|
id_token['nonce'] = qs['nonce'][0]
|
||||||
token_result['id_token'] = '.%s.' % base64url_encode(json.dumps(id_token))
|
token_result['id_token'] = '.%s.' % force_text(base64url_encode(json.dumps(id_token)))
|
||||||
|
|
||||||
with mock.patch('wcs.qommon.ident.franceconnect.http_post_request') as http_post_request, \
|
with mock.patch('wcs.qommon.ident.franceconnect.http_post_request') as http_post_request, \
|
||||||
mock.patch('wcs.qommon.ident.franceconnect.http_get_page') as http_get_page:
|
mock.patch('wcs.qommon.ident.franceconnect.http_get_page') as http_get_page:
|
||||||
|
@ -238,7 +239,7 @@ def test_fc_login_page(caplog):
|
||||||
qs = urlparse.parse_qs(resp.location.split('?')[1])
|
qs = urlparse.parse_qs(resp.location.split('?')[1])
|
||||||
state = qs['state'][0]
|
state = qs['state'][0]
|
||||||
id_token['nonce'] = qs['nonce'][0]
|
id_token['nonce'] = qs['nonce'][0]
|
||||||
token_result['id_token'] = '.%s.' % base64url_encode(json.dumps(id_token))
|
token_result['id_token'] = '.%s.' % force_text(base64url_encode(json.dumps(id_token)))
|
||||||
bad_user_info_result = {
|
bad_user_info_result = {
|
||||||
'sub': 'ymca2',
|
'sub': 'ymca2',
|
||||||
'given_name': 'John',
|
'given_name': 'John',
|
||||||
|
@ -254,7 +255,7 @@ def test_fc_login_page(caplog):
|
||||||
}))
|
}))
|
||||||
assert pub.user_class.count() == 1, 'an invalid user (no email) has been created'
|
assert pub.user_class.count() == 1, 'an invalid user (no email) has been created'
|
||||||
session = get_session(app)
|
session = get_session(app)
|
||||||
assert not session
|
assert (not session or not session.user)
|
||||||
|
|
||||||
|
|
||||||
def test_fc_settings():
|
def test_fc_settings():
|
||||||
|
@ -264,7 +265,7 @@ def test_fc_settings():
|
||||||
resp.forms[0]['methods$elementfc'].checked = True
|
resp.forms[0]['methods$elementfc'].checked = True
|
||||||
resp = resp.forms[0].submit().follow()
|
resp = resp.forms[0].submit().follow()
|
||||||
|
|
||||||
assert 'FranceConnect' in resp.body
|
assert 'FranceConnect' in resp.text
|
||||||
resp = resp.click('FranceConnect')
|
resp = resp.click('FranceConnect')
|
||||||
resp = resp.forms[0].submit('user_field_mappings$add_element')
|
resp = resp.forms[0].submit('user_field_mappings$add_element')
|
||||||
resp = resp.forms[0].submit('user_field_mappings$add_element')
|
resp = resp.forms[0].submit('user_field_mappings$add_element')
|
||||||
|
@ -321,7 +322,7 @@ def test_fc_settings_no_user_profile():
|
||||||
resp.forms[0]['methods$elementfc'].checked = True
|
resp.forms[0]['methods$elementfc'].checked = True
|
||||||
resp = resp.forms[0].submit().follow()
|
resp = resp.forms[0].submit().follow()
|
||||||
|
|
||||||
assert 'FranceConnect' in resp.body
|
assert 'FranceConnect' in resp.text
|
||||||
resp = resp.click('FranceConnect')
|
resp = resp.click('FranceConnect')
|
||||||
resp = resp.forms[0].submit('user_field_mappings$add_element')
|
resp = resp.forms[0].submit('user_field_mappings$add_element')
|
||||||
resp = resp.forms[0].submit('user_field_mappings$add_element')
|
resp = resp.forms[0].submit('user_field_mappings$add_element')
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -162,7 +162,7 @@ def test_file_field(pub):
|
||||||
formdef.store()
|
formdef.store()
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
upload = Upload('test.txt', 'text/plain', 'ascii')
|
upload = Upload('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['first line', 'second line'])
|
upload.receive([b'first line', b'second line'])
|
||||||
formdata.data = {'0': upload}
|
formdata.data = {'0': upload}
|
||||||
formdata.id = 1
|
formdata.id = 1
|
||||||
substvars = formdata.get_substitution_variables()
|
substvars = formdata.get_substitution_variables()
|
||||||
|
@ -477,8 +477,8 @@ def test_backoffice_field_varname(pub):
|
||||||
assert substvars.get('form_var_backoffice_blah') == 'test'
|
assert substvars.get('form_var_backoffice_blah') == 'test'
|
||||||
|
|
||||||
def test_workflow_data_file_url(pub):
|
def test_workflow_data_file_url(pub):
|
||||||
upload = Upload('test.txt', 'text/plain', 'ascii')
|
upload = PicklableUpload('test.txt', 'text/plain', 'ascii')
|
||||||
upload.receive(['first line', 'second line'])
|
upload.receive([b'first line', b'second line'])
|
||||||
|
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
@ -588,7 +588,7 @@ def variable_test_data(pub):
|
||||||
'11': '4',
|
'11': '4',
|
||||||
'12': '3.14',
|
'12': '3.14',
|
||||||
}
|
}
|
||||||
formdata.data['5'].receive(['hello world'])
|
formdata.data['5'].receive([b'hello world'])
|
||||||
formdata.geolocations = {'base': {'lat': 1, 'lon': 2}}
|
formdata.geolocations = {'base': {'lat': 1, 'lon': 2}}
|
||||||
formdata.store()
|
formdata.store()
|
||||||
pub.substitutions.feed(pub)
|
pub.substitutions.feed(pub)
|
||||||
|
|
|
@ -1,15 +1,15 @@
|
||||||
import cPickle
|
|
||||||
import datetime
|
import datetime
|
||||||
import glob
|
import glob
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
import pickle
|
||||||
import sys
|
import sys
|
||||||
import shutil
|
import shutil
|
||||||
import time
|
import time
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from django.utils.six import StringIO
|
from django.utils.six import BytesIO
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
from wcs import fields
|
from wcs import fields
|
||||||
from wcs.formdef import FormDef, get_formdefs_of_all_kinds
|
from wcs.formdef import FormDef, get_formdefs_of_all_kinds
|
||||||
|
@ -160,8 +160,8 @@ def test_schema_with_date_variable(pub):
|
||||||
DateField(label='Test', type='date', varname='foo'))
|
DateField(label='Test', type='date', varname='foo'))
|
||||||
wf.store()
|
wf.store()
|
||||||
formdef.workflow_id = wf.id
|
formdef.workflow_id = wf.id
|
||||||
formdef.workflow_options = {'foo': time.gmtime(time.mktime((2016, 4, 2, 0, 0, 0, 0, 0, 0)))}
|
formdef.workflow_options = {'foo': datetime.datetime(2016, 4, 2).timetuple()}
|
||||||
assert json.loads(formdef.export_to_json())['options']['foo'].startswith('2016-04')
|
assert json.loads(formdef.export_to_json())['options']['foo'].startswith('2016-04-02')
|
||||||
|
|
||||||
def test_substitution_variables_object(pub):
|
def test_substitution_variables_object(pub):
|
||||||
formdef = FormDef()
|
formdef = FormDef()
|
||||||
|
@ -221,10 +221,10 @@ def test_internal_identifier_migration(pub):
|
||||||
formdef.fields = []
|
formdef.fields = []
|
||||||
formdef.store()
|
formdef.store()
|
||||||
|
|
||||||
obj = cPickle.load(open(formdef.get_object_filename()))
|
obj = pickle.load(open(formdef.get_object_filename(), 'rb'))
|
||||||
del obj.internal_identifier
|
del obj.internal_identifier
|
||||||
cPickle.dump(obj, open(formdef.get_object_filename(), 'w'))
|
pickle.dump(obj, open(formdef.get_object_filename(), 'wb'))
|
||||||
assert cPickle.load(open(formdef.get_object_filename())).internal_identifier is None
|
assert pickle.load(open(formdef.get_object_filename(), 'rb')).internal_identifier is None
|
||||||
assert FormDef.get(formdef.id, ignore_migration=True).internal_identifier is None
|
assert FormDef.get(formdef.id, ignore_migration=True).internal_identifier is None
|
||||||
|
|
||||||
formdef = FormDef.get(formdef.id)
|
formdef = FormDef.get(formdef.id)
|
||||||
|
@ -264,7 +264,7 @@ def test_unused_file_removal_job(pub):
|
||||||
formdata.data = {
|
formdata.data = {
|
||||||
'5': PicklableUpload('test.txt', 'text/plain'),
|
'5': PicklableUpload('test.txt', 'text/plain'),
|
||||||
}
|
}
|
||||||
formdata.data['5'].receive(['hello world'])
|
formdata.data['5'].receive([b'hello world'])
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
|
||||||
assert formdata.data['5'].qfilename in os.listdir(os.path.join(pub.app_dir, 'uploads'))
|
assert formdata.data['5'].qfilename in os.listdir(os.path.join(pub.app_dir, 'uploads'))
|
||||||
|
@ -285,7 +285,7 @@ def test_unused_file_removal_job(pub):
|
||||||
formdata.data = {
|
formdata.data = {
|
||||||
'5': PicklableUpload('test.txt', 'text/plain'),
|
'5': PicklableUpload('test.txt', 'text/plain'),
|
||||||
}
|
}
|
||||||
formdata.data['5'].receive(['hello world'])
|
formdata.data['5'].receive([b'hello world'])
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
|
||||||
assert formdata.data['5'].qfilename in os.listdir(os.path.join(pub.app_dir, 'uploads'))
|
assert formdata.data['5'].qfilename in os.listdir(os.path.join(pub.app_dir, 'uploads'))
|
||||||
|
@ -301,7 +301,7 @@ def test_unused_file_removal_job(pub):
|
||||||
formdata.data = {
|
formdata.data = {
|
||||||
'5': PicklableUpload('test.txt', 'text/plain'),
|
'5': PicklableUpload('test.txt', 'text/plain'),
|
||||||
}
|
}
|
||||||
formdata.data['5'].receive(['hello world'])
|
formdata.data['5'].receive([b'hello world'])
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
|
||||||
# same file, deduplicated
|
# same file, deduplicated
|
||||||
|
@ -324,7 +324,7 @@ def test_unused_file_removal_job(pub):
|
||||||
workflow.store()
|
workflow.store()
|
||||||
formdef.workflow = workflow
|
formdef.workflow = workflow
|
||||||
formdef.workflow_options = {'1': PicklableUpload('test.txt', 'text/plain')}
|
formdef.workflow_options = {'1': PicklableUpload('test.txt', 'text/plain')}
|
||||||
formdef.workflow_options['1'].receive(['hello world'])
|
formdef.workflow_options['1'].receive([b'hello world'])
|
||||||
formdef.store()
|
formdef.store()
|
||||||
|
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
|
@ -332,7 +332,7 @@ def test_unused_file_removal_job(pub):
|
||||||
formdata.data = {
|
formdata.data = {
|
||||||
'5': PicklableUpload('test.txt', 'text/plain'),
|
'5': PicklableUpload('test.txt', 'text/plain'),
|
||||||
}
|
}
|
||||||
formdata.data['5'].receive(['hello world'])
|
formdata.data['5'].receive([b'hello world'])
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
|
||||||
assert len(os.listdir(os.path.join(pub.app_dir, 'uploads'))) == 1
|
assert len(os.listdir(os.path.join(pub.app_dir, 'uploads'))) == 1
|
||||||
|
@ -353,7 +353,7 @@ def test_unused_file_removal_job(pub):
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
|
||||||
formdata.evolution[-1].parts = [AttachmentEvolutionPart('hello.txt',
|
formdata.evolution[-1].parts = [AttachmentEvolutionPart('hello.txt',
|
||||||
fp=StringIO('hello world'), varname='testfile')]
|
fp=BytesIO(b'hello world'), varname='testfile')]
|
||||||
formdata.store()
|
formdata.store()
|
||||||
assert len(glob.glob(os.path.join(pub.app_dir, 'attachments', '*/*'))) == 1
|
assert len(glob.glob(os.path.join(pub.app_dir, 'attachments', '*/*'))) == 1
|
||||||
clean_unused_files(pub)
|
clean_unused_files(pub)
|
||||||
|
@ -372,7 +372,7 @@ def test_unused_file_removal_job(pub):
|
||||||
user = pub.user_class()
|
user = pub.user_class()
|
||||||
user.email = 'bar@localhost'
|
user.email = 'bar@localhost'
|
||||||
user.form_data = {'3': PicklableUpload('test.txt', 'text/plain')}
|
user.form_data = {'3': PicklableUpload('test.txt', 'text/plain')}
|
||||||
user.form_data['3'].receive(['hello world 2'])
|
user.form_data['3'].receive([b'hello world 2'])
|
||||||
user.store()
|
user.store()
|
||||||
|
|
||||||
assert len(os.listdir(os.path.join(pub.app_dir, 'uploads'))) == 1
|
assert len(os.listdir(os.path.join(pub.app_dir, 'uploads'))) == 1
|
||||||
|
|
|
@ -3,10 +3,10 @@
|
||||||
import pytest
|
import pytest
|
||||||
import sys
|
import sys
|
||||||
import shutil
|
import shutil
|
||||||
import StringIO
|
|
||||||
import time
|
import time
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
|
from django.utils.six import BytesIO, StringIO
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
|
|
||||||
from wcs.categories import Category
|
from wcs.categories import Category
|
||||||
|
@ -46,7 +46,7 @@ def assert_xml_import_export_works(formdef, include_id=False):
|
||||||
|
|
||||||
def assert_json_import_export_works(formdef, include_id=False):
|
def assert_json_import_export_works(formdef, include_id=False):
|
||||||
formdef2 = FormDef.import_from_json(
|
formdef2 = FormDef.import_from_json(
|
||||||
StringIO.StringIO(formdef.export_to_json(include_id=include_id)), include_id=include_id)
|
StringIO(formdef.export_to_json(include_id=include_id)), include_id=include_id)
|
||||||
assert_compare_formdef(formdef, formdef2, include_id=include_id)
|
assert_compare_formdef(formdef, formdef2, include_id=include_id)
|
||||||
return formdef2
|
return formdef2
|
||||||
|
|
||||||
|
@ -199,8 +199,8 @@ def test_workflow_options_with_file():
|
||||||
from wcs.qommon.form import UploadedFile
|
from wcs.qommon.form import UploadedFile
|
||||||
|
|
||||||
upload = Upload('/foo/bar', content_type='application/vnd.oasis.opendocument.text')
|
upload = Upload('/foo/bar', content_type='application/vnd.oasis.opendocument.text')
|
||||||
file_content = '''PK\x03\x04\x14\x00\x00\x08\x00\x00\'l\x8eG^\xc62\x0c\'\x00'''
|
file_content = b'''PK\x03\x04\x14\x00\x00\x08\x00\x00\'l\x8eG^\xc62\x0c\'\x00'''
|
||||||
upload.fp = StringIO.StringIO()
|
upload.fp = BytesIO()
|
||||||
upload.fp.write(file_content)
|
upload.fp.write(file_content)
|
||||||
upload.fp.seek(0)
|
upload.fp.seek(0)
|
||||||
model_file = UploadedFile(pub.APP_DIR, None, upload)
|
model_file = UploadedFile(pub.APP_DIR, None, upload)
|
||||||
|
@ -311,7 +311,7 @@ def test_invalid_field_type():
|
||||||
formdef.fields = [fields.StringField(id='1', type='XXX')]
|
formdef.fields = [fields.StringField(id='1', type='XXX')]
|
||||||
export = ET.tostring(export_to_indented_xml(formdef))
|
export = ET.tostring(export_to_indented_xml(formdef))
|
||||||
with pytest.raises(FormdefImportError):
|
with pytest.raises(FormdefImportError):
|
||||||
FormDef.import_from_xml(StringIO.StringIO(export), include_id=True)
|
FormDef.import_from_xml(BytesIO(export), include_id=True)
|
||||||
|
|
||||||
def test_unknown_data_source():
|
def test_unknown_data_source():
|
||||||
formdef = FormDef()
|
formdef = FormDef()
|
||||||
|
@ -320,13 +320,13 @@ def test_unknown_data_source():
|
||||||
data_source={'type': 'json', 'value': 'http://example.net'})]
|
data_source={'type': 'json', 'value': 'http://example.net'})]
|
||||||
export = ET.tostring(export_to_indented_xml(formdef))
|
export = ET.tostring(export_to_indented_xml(formdef))
|
||||||
|
|
||||||
FormDef.import_from_xml(StringIO.StringIO(export))
|
FormDef.import_from_xml(BytesIO(export))
|
||||||
|
|
||||||
formdef.fields = [fields.StringField(id='1', type='string',
|
formdef.fields = [fields.StringField(id='1', type='string',
|
||||||
data_source={'type': 'foobar'})]
|
data_source={'type': 'foobar'})]
|
||||||
export = ET.tostring(export_to_indented_xml(formdef))
|
export = ET.tostring(export_to_indented_xml(formdef))
|
||||||
with pytest.raises(FormdefImportError):
|
with pytest.raises(FormdefImportError):
|
||||||
FormDef.import_from_xml(StringIO.StringIO(export))
|
FormDef.import_from_xml(BytesIO(export))
|
||||||
|
|
||||||
def test_duplicated_field_ids():
|
def test_duplicated_field_ids():
|
||||||
formdef = FormDef()
|
formdef = FormDef()
|
||||||
|
@ -338,12 +338,12 @@ def test_duplicated_field_ids():
|
||||||
export = ET.tostring(export_to_indented_xml(formdef, include_id=True))
|
export = ET.tostring(export_to_indented_xml(formdef, include_id=True))
|
||||||
|
|
||||||
with pytest.raises(FormdefImportError):
|
with pytest.raises(FormdefImportError):
|
||||||
FormDef.import_from_xml(StringIO.StringIO(export))
|
FormDef.import_from_xml(BytesIO(export))
|
||||||
|
|
||||||
with pytest.raises(FormdefImportError):
|
with pytest.raises(FormdefImportError):
|
||||||
FormDef.import_from_xml(StringIO.StringIO(export), include_id=True)
|
FormDef.import_from_xml(BytesIO(export), include_id=True)
|
||||||
|
|
||||||
formdef2 = FormDef.import_from_xml(StringIO.StringIO(export), fix_on_error=True)
|
formdef2 = FormDef.import_from_xml(BytesIO(export), fix_on_error=True)
|
||||||
assert formdef2.fields[0].id == '1'
|
assert formdef2.fields[0].id == '1'
|
||||||
assert formdef2.fields[1].id == '2'
|
assert formdef2.fields[1].id == '2'
|
||||||
assert formdef2.fields[2].id == '3'
|
assert formdef2.fields[2].id == '3'
|
||||||
|
@ -357,7 +357,7 @@ def test_wrong_max_field_id():
|
||||||
formdef.max_field_id = 1
|
formdef.max_field_id = 1
|
||||||
export = ET.tostring(export_to_indented_xml(formdef, include_id=True))
|
export = ET.tostring(export_to_indented_xml(formdef, include_id=True))
|
||||||
|
|
||||||
formdef2 = FormDef.import_from_xml(StringIO.StringIO(export), include_id=True)
|
formdef2 = FormDef.import_from_xml(BytesIO(export), include_id=True)
|
||||||
assert formdef2.max_field_id == 2
|
assert formdef2.max_field_id == 2
|
||||||
|
|
||||||
def test_page_condition():
|
def test_page_condition():
|
||||||
|
@ -538,9 +538,9 @@ def test_field_validation():
|
||||||
# backward compatibility
|
# backward compatibility
|
||||||
formdef_xml = formdef.export_to_xml()
|
formdef_xml = formdef.export_to_xml()
|
||||||
old_format = ET.tostring(formdef_xml).replace(
|
old_format = ET.tostring(formdef_xml).replace(
|
||||||
'<validation><type>regex</type><value>\\d</value></validation>',
|
b'<validation><type>regex</type><value>\\d</value></validation>',
|
||||||
'<validation>\\d</validation>')
|
b'<validation>\\d</validation>')
|
||||||
f2 = FormDef.import_from_xml(StringIO.StringIO(old_format))
|
f2 = FormDef.import_from_xml(BytesIO(old_format))
|
||||||
assert len(f2.fields) == len(formdef.fields)
|
assert len(f2.fields) == len(formdef.fields)
|
||||||
assert f2.fields[0].validation == {'type': 'regex', 'value': '\\d'}
|
assert f2.fields[0].validation == {'type': 'regex', 'value': '\\d'}
|
||||||
|
|
||||||
|
|
|
@ -1,21 +1,22 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
import collections
|
import collections
|
||||||
import copy
|
import copy
|
||||||
import cPickle
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
import pickle
|
||||||
import pytest
|
import pytest
|
||||||
import shutil
|
import shutil
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
import urllib2
|
|
||||||
|
|
||||||
import mock
|
import mock
|
||||||
|
|
||||||
from utilities import create_temporary_pub, clean_temporary_pub
|
from utilities import create_temporary_pub, clean_temporary_pub
|
||||||
|
|
||||||
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
|
|
||||||
|
from wcs.qommon import force_str
|
||||||
from wcs.ctl.check_hobos import CmdCheckHobos
|
from wcs.ctl.check_hobos import CmdCheckHobos
|
||||||
from wcs.publisher import WcsPublisher
|
from wcs.publisher import WcsPublisher
|
||||||
from wcs import fields
|
from wcs import fields
|
||||||
|
@ -240,7 +241,7 @@ def test_configure_site_options():
|
||||||
assert (pub.get_site_option('authentic.example.net', 'wscall-secrets')
|
assert (pub.get_site_option('authentic.example.net', 'wscall-secrets')
|
||||||
== CmdCheckHobos.shared_secret(HOBO_JSON['services'][1]['secret_key'],
|
== CmdCheckHobos.shared_secret(HOBO_JSON['services'][1]['secret_key'],
|
||||||
HOBO_JSON['services'][2]['secret_key']))
|
HOBO_JSON['services'][2]['secret_key']))
|
||||||
self_domain = urllib2.urlparse.urlsplit(service.get('base_url')).netloc
|
self_domain = urlparse.urlsplit(service.get('base_url')).netloc
|
||||||
assert pub.get_site_option(self_domain, 'wscall-secrets') != '0'
|
assert pub.get_site_option(self_domain, 'wscall-secrets') != '0'
|
||||||
|
|
||||||
def test_update_configuration():
|
def test_update_configuration():
|
||||||
|
@ -284,7 +285,7 @@ def test_update_profile():
|
||||||
hobo_cmd.update_profile(profile, pub)
|
hobo_cmd.update_profile(profile, pub)
|
||||||
from wcs.admin.settings import UserFieldsFormDef
|
from wcs.admin.settings import UserFieldsFormDef
|
||||||
formdef = UserFieldsFormDef(pub)
|
formdef = UserFieldsFormDef(pub)
|
||||||
field_labels = [x.get('label').encode('utf-8') for x in profile.get('fields') if not x.get('disabled')]
|
field_labels = [force_str(x.get('label')) for x in profile.get('fields') if not x.get('disabled')]
|
||||||
field_ids = [x.get('name') for x in profile.get('fields') if not x.get('disabled')]
|
field_ids = [x.get('name') for x in profile.get('fields') if not x.get('disabled')]
|
||||||
assert [x.label for x in formdef.fields] == field_labels
|
assert [x.label for x in formdef.fields] == field_labels
|
||||||
for field_id in [pub.cfg['users']['field_email']] + pub.cfg['users']['field_name']:
|
for field_id in [pub.cfg['users']['field_email']] + pub.cfg['users']['field_name']:
|
||||||
|
@ -373,14 +374,14 @@ def test_deploy():
|
||||||
|
|
||||||
# update
|
# update
|
||||||
cleanup()
|
cleanup()
|
||||||
pub_cfg = cPickle.load(open(os.path.join(alt_tempdir, 'wcs.example.net', 'config.pck')))
|
pub_cfg = pickle.load(open(os.path.join(alt_tempdir, 'wcs.example.net', 'config.pck'), 'rb'))
|
||||||
assert pub_cfg['language'] == {'language': 'fr'}
|
assert pub_cfg['language'] == {'language': 'fr'}
|
||||||
del pub_cfg['language']
|
del pub_cfg['language']
|
||||||
cPickle.dump(pub_cfg,
|
pickle.dump(pub_cfg,
|
||||||
open(os.path.join(alt_tempdir, 'wcs.example.net', 'config.pck'), 'w'))
|
open(os.path.join(alt_tempdir, 'wcs.example.net', 'config.pck'), 'wb'))
|
||||||
hobo_cmd.execute(base_options, sub_options,
|
hobo_cmd.execute(base_options, sub_options,
|
||||||
['http://wcs.example.net/', os.path.join(alt_tempdir, 'hobo.json')])
|
['http://wcs.example.net/', os.path.join(alt_tempdir, 'hobo.json')])
|
||||||
pub_cfg = cPickle.load(open(os.path.join(alt_tempdir, 'wcs.example.net', 'config.pck')))
|
pub_cfg = pickle.load(open(os.path.join(alt_tempdir, 'wcs.example.net', 'config.pck'), 'rb'))
|
||||||
assert pub_cfg['language'] == {'language': 'fr'}
|
assert pub_cfg['language'] == {'language': 'fr'}
|
||||||
|
|
||||||
def test_configure_postgresql():
|
def test_configure_postgresql():
|
||||||
|
@ -404,7 +405,7 @@ def test_configure_postgresql():
|
||||||
['http://wcs.example.net/', os.path.join(alt_tempdir, 'hobo.json')])
|
['http://wcs.example.net/', os.path.join(alt_tempdir, 'hobo.json')])
|
||||||
assert os.path.exists(os.path.join(alt_tempdir, 'wcs.example.net'))
|
assert os.path.exists(os.path.join(alt_tempdir, 'wcs.example.net'))
|
||||||
|
|
||||||
fd = file(os.path.join(alt_tempdir, 'wcs.example.net', 'site-options.cfg'), 'w')
|
fd = open(os.path.join(alt_tempdir, 'wcs.example.net', 'site-options.cfg'), 'w')
|
||||||
fd.write('[options]\n')
|
fd.write('[options]\n')
|
||||||
fd.write('postgresql = true\n')
|
fd.write('postgresql = true\n')
|
||||||
fd.close()
|
fd.close()
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
import shutil
|
import shutil
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
|
|
||||||
|
from wcs.qommon import force_str
|
||||||
from wcs.qommon.http_request import HTTPRequest
|
from wcs.qommon.http_request import HTTPRequest
|
||||||
from wcs.ctl.hobo_notify import CmdHoboNotify
|
from wcs.ctl.hobo_notify import CmdHoboNotify
|
||||||
from wcs.roles import Role
|
from wcs.roles import Role
|
||||||
|
@ -532,7 +533,7 @@ def test_process_notification_user_provision(pub):
|
||||||
assert user.form_data['_email'] == 'john.doe@example.net'
|
assert user.form_data['_email'] == 'john.doe@example.net'
|
||||||
assert user.email == 'john.doe@example.net'
|
assert user.email == 'john.doe@example.net'
|
||||||
assert user.form_data['_first_name'] == 'John'
|
assert user.form_data['_first_name'] == 'John'
|
||||||
assert user.form_data['_last_name'] == u'Doé'.encode('utf-8')
|
assert user.form_data['_last_name'] == force_str(u'Doé')
|
||||||
assert user.form_data['_zipcode'] == '13400'
|
assert user.form_data['_zipcode'] == '13400'
|
||||||
assert user.form_data['_birthdate'] is None
|
assert user.form_data['_birthdate'] is None
|
||||||
assert user.name_identifiers == ['a'*32]
|
assert user.name_identifiers == ['a'*32]
|
||||||
|
|
|
@ -9,6 +9,8 @@ import time
|
||||||
import datetime
|
import datetime
|
||||||
import base64
|
import base64
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
|
|
||||||
import wcs.api # workaround against circular dependencies :/
|
import wcs.api # workaround against circular dependencies :/
|
||||||
|
@ -18,7 +20,7 @@ from wcs.qommon.misc import (simplify, json_loads, parse_isotime, format_time,
|
||||||
date_format, get_as_datetime, normalize_geolocation)
|
date_format, get_as_datetime, normalize_geolocation)
|
||||||
from wcs.admin.settings import FileTypesDirectory
|
from wcs.admin.settings import FileTypesDirectory
|
||||||
from wcs.scripts import Script
|
from wcs.scripts import Script
|
||||||
from wcs.qommon import evalutils
|
from wcs.qommon import force_str, evalutils
|
||||||
from wcs.qommon.http_request import HTTPRequest
|
from wcs.qommon.http_request import HTTPRequest
|
||||||
from wcs.qommon.backoffice.listing import pagination_links
|
from wcs.qommon.backoffice.listing import pagination_links
|
||||||
from wcs.qommon.emails import email as send_email, docutils
|
from wcs.qommon.emails import email as send_email, docutils
|
||||||
|
@ -62,8 +64,8 @@ def test_humantime():
|
||||||
def test_parse_mimetypes():
|
def test_parse_mimetypes():
|
||||||
assert FileTypesDirectory.parse_mimetypes('application/pdf') == ['application/pdf']
|
assert FileTypesDirectory.parse_mimetypes('application/pdf') == ['application/pdf']
|
||||||
assert FileTypesDirectory.parse_mimetypes('.pdf') == ['application/pdf']
|
assert FileTypesDirectory.parse_mimetypes('.pdf') == ['application/pdf']
|
||||||
assert FileTypesDirectory.parse_mimetypes('.pdf, .odt') == [
|
assert set(FileTypesDirectory.parse_mimetypes('.pdf, .odt')) == set([
|
||||||
'application/pdf', 'application/vnd.oasis.opendocument.text']
|
'application/pdf', 'application/vnd.oasis.opendocument.text'])
|
||||||
|
|
||||||
def test_format_mimetypes():
|
def test_format_mimetypes():
|
||||||
assert FileTypesDirectory.format_mimetypes(['application/pdf']) == \
|
assert FileTypesDirectory.format_mimetypes(['application/pdf']) == \
|
||||||
|
@ -72,7 +74,8 @@ def test_format_mimetypes():
|
||||||
'application/pdf (.pdf), text/rtf'
|
'application/pdf (.pdf), text/rtf'
|
||||||
assert FileTypesDirectory.format_mimetypes(['application/pdf', 'application/msword']) in (
|
assert FileTypesDirectory.format_mimetypes(['application/pdf', 'application/msword']) in (
|
||||||
'application/pdf (.pdf), application/msword (.doc)',
|
'application/pdf (.pdf), application/msword (.doc)',
|
||||||
'application/pdf (.pdf), application/msword (.dot)')
|
'application/pdf (.pdf), application/msword (.dot)',
|
||||||
|
'application/pdf (.pdf), application/msword (.wiz)')
|
||||||
assert FileTypesDirectory.format_mimetypes(['application/pdf',
|
assert FileTypesDirectory.format_mimetypes(['application/pdf',
|
||||||
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
|
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
|
||||||
'application/msword']) == \
|
'application/msword']) == \
|
||||||
|
@ -97,7 +100,8 @@ def test_simplify_apostrophes():
|
||||||
|
|
||||||
def test_simplify_accented():
|
def test_simplify_accented():
|
||||||
assert simplify(u'cliché') == 'cliche'
|
assert simplify(u'cliché') == 'cliche'
|
||||||
assert simplify(u'cliché'.encode('iso-8859-1')) == 'cliche'
|
if six.PY2:
|
||||||
|
assert simplify(u'cliché'.encode('iso-8859-1')) == 'cliche'
|
||||||
|
|
||||||
def test_simplify_remove():
|
def test_simplify_remove():
|
||||||
assert simplify('this is: (a) "test"') == 'this-is-a-test'
|
assert simplify('this is: (a) "test"') == 'this-is-a-test'
|
||||||
|
@ -112,13 +116,14 @@ def test_json_str_decoder():
|
||||||
'lst': [{'a': 'b'}, 1, 2],
|
'lst': [{'a': 'b'}, 1, 2],
|
||||||
'bla': u'éléphant'
|
'bla': u'éléphant'
|
||||||
})
|
})
|
||||||
assert type(json.loads(json_str).keys()[0]) is unicode
|
if six.PY2:
|
||||||
assert type(json.loads(json_str)['lst'][0]['a']) is unicode
|
assert type(list(json.loads(json_str).keys())[0]) is unicode
|
||||||
|
assert type(json.loads(json_str)['lst'][0]['a']) is unicode
|
||||||
|
|
||||||
assert type(json_loads(json_str).keys()[0]) is str
|
assert type(list(json_loads(json_str).keys())[0]) is str
|
||||||
assert type(json_loads(json_str)['lst'][0]['a']) is str
|
assert type(json_loads(json_str)['lst'][0]['a']) is str
|
||||||
assert type(json_loads(json_str)['bla']) is str
|
assert type(json_loads(json_str)['bla']) is str
|
||||||
assert json_loads(json_str)['bla'] == u'éléphant'.encode('utf-8')
|
assert json_loads(json_str)['bla'] == force_str(u'éléphant')
|
||||||
|
|
||||||
def test_format_time():
|
def test_format_time():
|
||||||
assert format_time(None, '%(month_name)s') == '?'
|
assert format_time(None, '%(month_name)s') == '?'
|
||||||
|
@ -278,7 +283,7 @@ def test_email_signature_plain(emails):
|
||||||
send_email('test', mail_body='Hello', email_rcpt='test@localhost', want_html=False)
|
send_email('test', mail_body='Hello', email_rcpt='test@localhost', want_html=False)
|
||||||
assert emails.count() == 1
|
assert emails.count() == 1
|
||||||
assert not emails.emails['test']['msg'].is_multipart()
|
assert not emails.emails['test']['msg'].is_multipart()
|
||||||
assert 'Footer\nText' in emails.emails['test']['msg'].get_payload()
|
assert b'Footer\nText' in emails.emails['test']['msg'].get_payload(decode=True)
|
||||||
|
|
||||||
def test_email_from(emails):
|
def test_email_from(emails):
|
||||||
pub = create_temporary_pub()
|
pub = create_temporary_pub()
|
||||||
|
@ -298,7 +303,7 @@ def test_email_from(emails):
|
||||||
send_email('test', mail_body='Hello', email_rcpt='test@localhost', want_html=False)
|
send_email('test', mail_body='Hello', email_rcpt='test@localhost', want_html=False)
|
||||||
assert emails.count() == 1
|
assert emails.count() == 1
|
||||||
assert emails.emails['test']['from'] == 'foo@localhost'
|
assert emails.emails['test']['from'] == 'foo@localhost'
|
||||||
assert emails.emails['test']['msg']['From'] == '=?utf-8?q?HELLO?= <foo@localhost>'
|
assert emails.emails['test']['msg']['From'] in ('=?utf-8?q?HELLO?= <foo@localhost>', 'HELLO <foo@localhost>')
|
||||||
|
|
||||||
@pytest.mark.skipif('docutils is None')
|
@pytest.mark.skipif('docutils is None')
|
||||||
def test_email_signature_rst(emails):
|
def test_email_signature_rst(emails):
|
||||||
|
@ -310,8 +315,8 @@ def test_email_signature_rst(emails):
|
||||||
assert emails.emails['test']['msg'].get_content_subtype() == 'alternative'
|
assert emails.emails['test']['msg'].get_content_subtype() == 'alternative'
|
||||||
assert emails.emails['test']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
assert emails.emails['test']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
||||||
assert emails.emails['test']['msg'].get_payload()[1].get_content_type() == 'text/html'
|
assert emails.emails['test']['msg'].get_payload()[1].get_content_type() == 'text/html'
|
||||||
assert 'Footer\nText' in emails.emails['test']['msg'].get_payload()[0].get_payload()
|
assert b'Footer\nText' in emails.emails['test']['msg'].get_payload()[0].get_payload(decode=True)
|
||||||
assert '>Footer<' in emails.emails['test']['msg'].get_payload()[1].get_payload()
|
assert b'>Footer<' in emails.emails['test']['msg'].get_payload()[1].get_payload(decode=True)
|
||||||
|
|
||||||
@pytest.mark.skipif('docutils is None')
|
@pytest.mark.skipif('docutils is None')
|
||||||
def test_email_signature_rst_pipes(emails):
|
def test_email_signature_rst_pipes(emails):
|
||||||
|
@ -323,19 +328,19 @@ def test_email_signature_rst_pipes(emails):
|
||||||
assert emails.emails['test']['msg'].get_content_subtype() == 'alternative'
|
assert emails.emails['test']['msg'].get_content_subtype() == 'alternative'
|
||||||
assert emails.emails['test']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
assert emails.emails['test']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
||||||
assert emails.emails['test']['msg'].get_payload()[1].get_content_type() == 'text/html'
|
assert emails.emails['test']['msg'].get_payload()[1].get_content_type() == 'text/html'
|
||||||
assert 'Footer\nText' in emails.emails['test']['msg'].get_payload()[0].get_payload()
|
assert b'Footer\nText' in emails.emails['test']['msg'].get_payload()[0].get_payload(decode=True)
|
||||||
assert '>Footer<' in emails.emails['test']['msg'].get_payload()[1].get_payload()
|
assert b'>Footer<' in emails.emails['test']['msg'].get_payload()[1].get_payload(decode=True)
|
||||||
|
|
||||||
def test_email_plain_with_attachments(emails):
|
def test_email_plain_with_attachments(emails):
|
||||||
pub = create_temporary_pub()
|
pub = create_temporary_pub()
|
||||||
|
|
||||||
jpg = PicklableUpload('test.jpeg', 'image/jpeg')
|
jpg = PicklableUpload('test.jpeg', 'image/jpeg')
|
||||||
jpg_content = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg')).read()
|
jpg_content = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb').read()
|
||||||
jpg.receive([jpg_content])
|
jpg.receive([jpg_content])
|
||||||
txt = PicklableUpload('test.txt', 'text/plain')
|
txt = PicklableUpload('test.txt', 'text/plain')
|
||||||
txt.receive(['foo-text-bar'])
|
txt.receive([b'foo-text-bar'])
|
||||||
odt = PicklableUpload('test.odt', 'application/vnd.oasis.opendocument.text')
|
odt = PicklableUpload('test.odt', 'application/vnd.oasis.opendocument.text')
|
||||||
odt_content = open(os.path.join(os.path.dirname(__file__), 'template.odt')).read()
|
odt_content = open(os.path.join(os.path.dirname(__file__), 'template.odt'), 'rb').read()
|
||||||
odt.receive([odt_content])
|
odt.receive([odt_content])
|
||||||
|
|
||||||
send_email('jpg', mail_body='Hello',
|
send_email('jpg', mail_body='Hello',
|
||||||
|
@ -355,7 +360,7 @@ def test_email_plain_with_attachments(emails):
|
||||||
assert emails.emails['txt']['msg'].get_content_subtype() == 'mixed'
|
assert emails.emails['txt']['msg'].get_content_subtype() == 'mixed'
|
||||||
assert emails.emails['txt']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
assert emails.emails['txt']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
||||||
assert emails.emails['txt']['msg'].get_payload()[1].get_content_type() == 'text/plain'
|
assert emails.emails['txt']['msg'].get_payload()[1].get_content_type() == 'text/plain'
|
||||||
assert emails.emails['txt']['msg'].get_payload()[1].get_payload() == 'foo-text-bar'
|
assert emails.emails['txt']['msg'].get_payload()[1].get_payload(decode=True) == b'foo-text-bar'
|
||||||
|
|
||||||
send_email('jpgodt', mail_body='Hello',
|
send_email('jpgodt', mail_body='Hello',
|
||||||
email_rcpt='test@localhost', want_html=False,
|
email_rcpt='test@localhost', want_html=False,
|
||||||
|
@ -369,7 +374,7 @@ def test_email_plain_with_attachments(emails):
|
||||||
assert base64.b64decode(emails.emails['jpgodt']['msg'].get_payload()[2].get_payload()) == odt_content
|
assert base64.b64decode(emails.emails['jpgodt']['msg'].get_payload()[2].get_payload()) == odt_content
|
||||||
|
|
||||||
unknown = PicklableUpload('test.eo', 'x-foo/x-bar')
|
unknown = PicklableUpload('test.eo', 'x-foo/x-bar')
|
||||||
unknown.receive(['barfoo'])
|
unknown.receive([b'barfoo'])
|
||||||
send_email('unknown', mail_body='Hello',
|
send_email('unknown', mail_body='Hello',
|
||||||
email_rcpt='test@localhost', want_html=False,
|
email_rcpt='test@localhost', want_html=False,
|
||||||
attachments=[unknown])
|
attachments=[unknown])
|
||||||
|
@ -377,7 +382,7 @@ def test_email_plain_with_attachments(emails):
|
||||||
assert emails.emails['unknown']['msg'].get_content_subtype() == 'mixed'
|
assert emails.emails['unknown']['msg'].get_content_subtype() == 'mixed'
|
||||||
assert emails.emails['unknown']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
assert emails.emails['unknown']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
||||||
assert emails.emails['unknown']['msg'].get_payload()[1].get_content_type() == 'x-foo/x-bar'
|
assert emails.emails['unknown']['msg'].get_payload()[1].get_content_type() == 'x-foo/x-bar'
|
||||||
assert emails.emails['unknown']['msg'].get_payload()[1].get_payload() == base64.b64encode('barfoo')
|
assert emails.emails['unknown']['msg'].get_payload()[1].get_payload(decode=False).strip() == 'YmFyZm9v'
|
||||||
|
|
||||||
send_email('test-bad-attachment', mail_body='Hello',
|
send_email('test-bad-attachment', mail_body='Hello',
|
||||||
email_rcpt='test@localhost', want_html=False,
|
email_rcpt='test@localhost', want_html=False,
|
||||||
|
@ -391,7 +396,7 @@ def test_email_plain_and_html_with_attachments(emails):
|
||||||
pub = create_temporary_pub()
|
pub = create_temporary_pub()
|
||||||
pub.cfg['emails'] = {'footer': 'Footer\nText'}
|
pub.cfg['emails'] = {'footer': 'Footer\nText'}
|
||||||
jpg = PicklableUpload('test.jpeg', 'image/jpeg')
|
jpg = PicklableUpload('test.jpeg', 'image/jpeg')
|
||||||
jpg_content = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg')).read()
|
jpg_content = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb').read()
|
||||||
jpg.receive([jpg_content])
|
jpg.receive([jpg_content])
|
||||||
|
|
||||||
send_email('test', mail_body='Hello', email_rcpt='test@localhost', attachments=[jpg])
|
send_email('test', mail_body='Hello', email_rcpt='test@localhost', attachments=[jpg])
|
||||||
|
@ -447,16 +452,14 @@ M. Francis Kuntz
|
||||||
assert emails.emails['test']['msg'].get_content_subtype() == 'alternative'
|
assert emails.emails['test']['msg'].get_content_subtype() == 'alternative'
|
||||||
assert emails.emails['test']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
assert emails.emails['test']['msg'].get_payload()[0].get_content_type() == 'text/plain'
|
||||||
assert emails.emails['test']['msg'].get_payload()[1].get_content_type() == 'text/html'
|
assert emails.emails['test']['msg'].get_payload()[1].get_content_type() == 'text/html'
|
||||||
text = emails.emails['test']['msg'].get_payload()[0].get_payload()
|
text = emails.emails['test']['msg'].get_payload()[0].get_payload(decode=True)
|
||||||
html = emails.emails['test']['msg'].get_payload()[1].get_payload()
|
html = emails.emails['test']['msg'].get_payload()[1].get_payload(decode=True)
|
||||||
assert html.count('<ol') == 1
|
assert html.count(b'<ol') == 1
|
||||||
assert not '<ul' in html
|
assert not b'<ul' in html
|
||||||
assert 'arabic simple' in html
|
assert b'arabic simple' in html
|
||||||
assert 'M. Francis Kuntz' in html
|
assert b'M. Francis Kuntz' in html
|
||||||
|
|
||||||
def test_dict_from_prefix():
|
def test_dict_from_prefix():
|
||||||
hello_word_b64 = base64.encodestring('hello world')
|
|
||||||
|
|
||||||
d = evalutils.dict_from_prefix('var1', {})
|
d = evalutils.dict_from_prefix('var1', {})
|
||||||
assert d == {}
|
assert d == {}
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,8 @@
|
||||||
import cPickle
|
|
||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
import pickle
|
||||||
import shutil
|
import shutil
|
||||||
import StringIO
|
|
||||||
import os
|
import os
|
||||||
import zipfile
|
import zipfile
|
||||||
|
|
||||||
|
@ -15,6 +14,9 @@ from django.core.management import call_command
|
||||||
from django.core.management.base import CommandError
|
from django.core.management.base import CommandError
|
||||||
from django.http import Http404
|
from django.http import Http404
|
||||||
from django.test import override_settings
|
from django.test import override_settings
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_text
|
||||||
|
from django.utils.six import BytesIO, StringIO
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
from wcs.qommon import get_publisher_class
|
from wcs.qommon import get_publisher_class
|
||||||
from wcs.qommon.http_request import HTTPRequest
|
from wcs.qommon.http_request import HTTPRequest
|
||||||
|
@ -61,10 +63,16 @@ def test_plaintext_error():
|
||||||
assert not re.findall('^>.*\d+.*s = pub._generate_plaintext_error', s, re.MULTILINE)
|
assert not re.findall('^>.*\d+.*s = pub._generate_plaintext_error', s, re.MULTILINE)
|
||||||
|
|
||||||
def test_finish_failed_request():
|
def test_finish_failed_request():
|
||||||
|
pub.USE_LONG_TRACES = False
|
||||||
|
try:
|
||||||
|
raise Exception('foo')
|
||||||
|
except:
|
||||||
|
exc_type, exc_value, tb = sys.exc_info()
|
||||||
|
|
||||||
req = get_request()
|
req = get_request()
|
||||||
pub._set_request(req)
|
pub._set_request(req)
|
||||||
body = pub.finish_failed_request()
|
body = pub.finish_failed_request()
|
||||||
assert '<h1>Internal Server Error</h1>' in body
|
assert '<h1>Internal Server Error</h1>' in str(body)
|
||||||
|
|
||||||
req = get_request()
|
req = get_request()
|
||||||
pub._set_request(req)
|
pub._set_request(req)
|
||||||
|
@ -93,31 +101,31 @@ def test_finish_failed_request():
|
||||||
assert '<div class="error-page">' in str(body)
|
assert '<div class="error-page">' in str(body)
|
||||||
|
|
||||||
def test_finish_interrupted_request():
|
def test_finish_interrupted_request():
|
||||||
req = HTTPRequest(StringIO.StringIO(''), {
|
req = HTTPRequest(StringIO(''), {
|
||||||
'SERVER_NAME': 'example.net',
|
'SERVER_NAME': 'example.net',
|
||||||
'SCRIPT_NAME': '',
|
'SCRIPT_NAME': '',
|
||||||
'CONTENT_LENGTH': 'aaa',
|
'CONTENT_LENGTH': 'aaa',
|
||||||
})
|
})
|
||||||
response = pub.process_request(req)
|
response = pub.process_request(req)
|
||||||
assert 'invalid content-length header' in str(response)
|
assert b'invalid content-length header' in response.getvalue()
|
||||||
req = HTTPRequest(StringIO.StringIO(''), {
|
req = HTTPRequest(StringIO(''), {
|
||||||
'SERVER_NAME': 'example.net',
|
'SERVER_NAME': 'example.net',
|
||||||
'SCRIPT_NAME': '',
|
'SCRIPT_NAME': '',
|
||||||
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
|
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
|
||||||
'CONTENT_LENGTH': '1',
|
'CONTENT_LENGTH': '1',
|
||||||
})
|
})
|
||||||
response = pub.process_request(req)
|
response = pub.process_request(req)
|
||||||
assert 'Invalid request: unexpected end of request body' in str(response)
|
assert b'Invalid request: unexpected end of request body' in response.getvalue()
|
||||||
req = HTTPRequest(StringIO.StringIO(''), {
|
req = HTTPRequest(StringIO(''), {
|
||||||
'SERVER_NAME': 'example.net',
|
'SERVER_NAME': 'example.net',
|
||||||
'SCRIPT_NAME': '',
|
'SCRIPT_NAME': '',
|
||||||
'CONTENT_TYPE': 'multipart/form-data',
|
'CONTENT_TYPE': 'multipart/form-data',
|
||||||
'CONTENT_LENGTH': '1',
|
'CONTENT_LENGTH': '1',
|
||||||
})
|
})
|
||||||
response = pub.process_request(req)
|
response = pub.process_request(req)
|
||||||
assert 'Invalid request: multipart/form-data missing boundary' in str(response)
|
assert b'Invalid request: multipart/form-data missing boundary' in response.getvalue()
|
||||||
with pytest.raises(Http404):
|
with pytest.raises(Http404):
|
||||||
req = HTTPRequest(StringIO.StringIO(''), {
|
req = HTTPRequest(StringIO(''), {
|
||||||
'SERVER_NAME': 'example.net',
|
'SERVER_NAME': 'example.net',
|
||||||
'SCRIPT_NAME': '',
|
'SCRIPT_NAME': '',
|
||||||
'PATH_INFO': '/gloubiboulga',
|
'PATH_INFO': '/gloubiboulga',
|
||||||
|
@ -136,8 +144,8 @@ def test_get_tenants():
|
||||||
def test_register_cronjobs():
|
def test_register_cronjobs():
|
||||||
assert not pub.cronjobs
|
assert not pub.cronjobs
|
||||||
pub.register_cronjobs()
|
pub.register_cronjobs()
|
||||||
assert 'apply_global_action_timeouts' in [x.function.func_name for x in pub.cronjobs]
|
assert 'apply_global_action_timeouts' in [x.function.__name__ for x in pub.cronjobs]
|
||||||
assert 'clean_sessions' in [x.function.func_name for x in pub.cronjobs]
|
assert 'clean_sessions' in [x.function.__name__ for x in pub.cronjobs]
|
||||||
|
|
||||||
def test_get_default_position():
|
def test_get_default_position():
|
||||||
assert pub.get_default_position() == '50.84;4.36'
|
assert pub.get_default_position() == '50.84;4.36'
|
||||||
|
@ -147,9 +155,9 @@ def test_import_config_zip():
|
||||||
pub.cfg['sp'] = {'what': 'ever'}
|
pub.cfg['sp'] = {'what': 'ever'}
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
|
|
||||||
c = StringIO.StringIO()
|
c = BytesIO()
|
||||||
z = zipfile.ZipFile(c, 'w')
|
z = zipfile.ZipFile(c, 'w')
|
||||||
z.writestr('config.pck', cPickle.dumps(
|
z.writestr('config.pck', pickle.dumps(
|
||||||
{'language': {'language': 'fr'},
|
{'language': {'language': 'fr'},
|
||||||
'whatever': ['a', 'b', 'c']}))
|
'whatever': ['a', 'b', 'c']}))
|
||||||
z.close()
|
z.close()
|
||||||
|
@ -160,7 +168,7 @@ def test_import_config_zip():
|
||||||
assert pub.cfg['whatever'] == ['a', 'b', 'c']
|
assert pub.cfg['whatever'] == ['a', 'b', 'c']
|
||||||
assert pub.cfg['sp'] == {'what': 'ever'}
|
assert pub.cfg['sp'] == {'what': 'ever'}
|
||||||
|
|
||||||
c = StringIO.StringIO()
|
c = BytesIO()
|
||||||
z = zipfile.ZipFile(c, 'w')
|
z = zipfile.ZipFile(c, 'w')
|
||||||
z.writestr('config.json', json.dumps(
|
z.writestr('config.json', json.dumps(
|
||||||
{'language': {'language': 'en'},
|
{'language': {'language': 'en'},
|
||||||
|
@ -171,8 +179,9 @@ def test_import_config_zip():
|
||||||
pub.import_zip(c)
|
pub.import_zip(c)
|
||||||
assert pub.cfg['language'] == {'language': 'en'}
|
assert pub.cfg['language'] == {'language': 'en'}
|
||||||
assert pub.cfg['sp'] == {'what': 'ever'}
|
assert pub.cfg['sp'] == {'what': 'ever'}
|
||||||
assert not isinstance(pub.cfg['language'], unicode)
|
if six.PY2:
|
||||||
assert not isinstance(pub.cfg['whatever2'][-1]['c'], unicode)
|
assert not isinstance(pub.cfg['language'], unicode)
|
||||||
|
assert not isinstance(pub.cfg['whatever2'][-1]['c'], unicode)
|
||||||
|
|
||||||
def test_cron_command():
|
def test_cron_command():
|
||||||
pub = create_temporary_pub()
|
pub = create_temporary_pub()
|
||||||
|
|
|
@ -1,9 +1,12 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import hashlib
|
import hashlib
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
|
|
||||||
|
from wcs.qommon import force_str
|
||||||
from wcs.qommon.http_request import HTTPRequest
|
from wcs.qommon.http_request import HTTPRequest
|
||||||
from wcs.qommon.ident.password_accounts import PasswordAccount
|
from wcs.qommon.ident.password_accounts import PasswordAccount
|
||||||
|
|
||||||
|
@ -42,7 +45,7 @@ def test_no_user_registration(pub):
|
||||||
def test_link_on_login_page(pub):
|
def test_link_on_login_page(pub):
|
||||||
app = get_app(pub)
|
app = get_app(pub)
|
||||||
page = app.get('/login/')
|
page = app.get('/login/')
|
||||||
assert '/register/' in page.body
|
assert '/register/' in page.text
|
||||||
|
|
||||||
def test_no_password(pub):
|
def test_no_password(pub):
|
||||||
app = get_app(pub)
|
app = get_app(pub)
|
||||||
|
@ -61,7 +64,7 @@ def test_user_registration_mismatch(pub):
|
||||||
register_form['password$pwd1'] = 'bar'
|
register_form['password$pwd1'] = 'bar'
|
||||||
register_form['password$pwd2'] = 'baz'
|
register_form['password$pwd2'] = 'baz'
|
||||||
resp = register_form.submit()
|
resp = register_form.submit()
|
||||||
assert 'Passwords do not match' in resp.body
|
assert 'Passwords do not match' in resp.text
|
||||||
|
|
||||||
def do_user_registration(pub, username='foo', password='bar'):
|
def do_user_registration(pub, username='foo', password='bar'):
|
||||||
initial_user_count = pub.user_class.count()
|
initial_user_count = pub.user_class.count()
|
||||||
|
@ -104,14 +107,14 @@ def test_user_password_hashing(pub):
|
||||||
do_user_registration(pub)
|
do_user_registration(pub)
|
||||||
|
|
||||||
account = PasswordAccount.get('foo')
|
account = PasswordAccount.get('foo')
|
||||||
assert account.password == hashlib.sha256('bar').hexdigest()
|
assert account.password == hashlib.sha256(b'bar').hexdigest()
|
||||||
|
|
||||||
def test_user_password_accents(pub):
|
def test_user_password_accents(pub):
|
||||||
pub.user_class.wipe()
|
pub.user_class.wipe()
|
||||||
PasswordAccount.wipe()
|
PasswordAccount.wipe()
|
||||||
pub.cfg['passwords'] = {'generate': False, 'hashing_algo': None}
|
pub.cfg['passwords'] = {'generate': False, 'hashing_algo': None}
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
password = u'foo\u00eb'.encode('utf-8')
|
password = force_str(u'fooë')
|
||||||
do_user_registration(pub, password=password)
|
do_user_registration(pub, password=password)
|
||||||
|
|
||||||
account = PasswordAccount.get('foo')
|
account = PasswordAccount.get('foo')
|
||||||
|
@ -169,7 +172,7 @@ def test_user_login(pub):
|
||||||
resp.forms[0]['username'] = 'foo'
|
resp.forms[0]['username'] = 'foo'
|
||||||
resp.forms[0]['password'] = 'foo'
|
resp.forms[0]['password'] = 'foo'
|
||||||
resp = resp.forms[0].submit()
|
resp = resp.forms[0].submit()
|
||||||
assert 'Invalid credentials' in resp.body
|
assert 'Invalid credentials' in resp.text
|
||||||
|
|
||||||
# correct passwod
|
# correct passwod
|
||||||
app = get_app(pub)
|
app = get_app(pub)
|
||||||
|
@ -190,7 +193,7 @@ def test_forgotten(pub, emails):
|
||||||
|
|
||||||
app = get_app(pub)
|
app = get_app(pub)
|
||||||
resp = app.get('/login/')
|
resp = app.get('/login/')
|
||||||
assert '/ident/password/forgotten' in resp.body
|
assert '/ident/password/forgotten' in resp.text
|
||||||
|
|
||||||
resp = app.get('/ident/password/forgotten')
|
resp = app.get('/ident/password/forgotten')
|
||||||
resp.forms[0]['username'] = 'bar' # this account doesn't exist
|
resp.forms[0]['username'] = 'bar' # this account doesn't exist
|
||||||
|
@ -209,7 +212,7 @@ def test_forgotten(pub, emails):
|
||||||
resp = app.get('/ident/password/forgotten')
|
resp = app.get('/ident/password/forgotten')
|
||||||
resp.forms[0]['username'] = 'foo'
|
resp.forms[0]['username'] = 'foo'
|
||||||
resp = resp.forms[0].submit()
|
resp = resp.forms[0].submit()
|
||||||
assert 'A token for changing your password has been emailed to you.' in resp.body
|
assert 'A token for changing your password has been emailed to you.' in resp.text
|
||||||
|
|
||||||
assert emails.get('Change Password Request')
|
assert emails.get('Change Password Request')
|
||||||
assert emails.get('Change Password Request')['to'] == 'foo@localhost'
|
assert emails.get('Change Password Request')['to'] == 'foo@localhost'
|
||||||
|
@ -221,16 +224,16 @@ def test_forgotten(pub, emails):
|
||||||
|
|
||||||
# cancel request
|
# cancel request
|
||||||
resp = app.get(confirm_urls[1])
|
resp = app.get(confirm_urls[1])
|
||||||
assert 'Your request has been cancelled' in resp.body
|
assert 'Your request has been cancelled' in resp.text
|
||||||
|
|
||||||
resp = app.get(confirm_urls[1])
|
resp = app.get(confirm_urls[1])
|
||||||
assert 'The token you submitted does not exist' in resp.body
|
assert 'The token you submitted does not exist' in resp.text
|
||||||
|
|
||||||
# new forgotten request
|
# new forgotten request
|
||||||
resp = app.get('/ident/password/forgotten')
|
resp = app.get('/ident/password/forgotten')
|
||||||
resp.forms[0]['username'] = 'foo'
|
resp.forms[0]['username'] = 'foo'
|
||||||
resp = resp.forms[0].submit()
|
resp = resp.forms[0].submit()
|
||||||
assert 'A token for changing your password has been emailed to you.' in resp.body
|
assert 'A token for changing your password has been emailed to you.' in resp.text
|
||||||
|
|
||||||
body = emails.get('Change Password Request')['payload']
|
body = emails.get('Change Password Request')['payload']
|
||||||
confirm_urls = re.findall(r'http://.*\w', body)
|
confirm_urls = re.findall(r'http://.*\w', body)
|
||||||
|
@ -238,7 +241,7 @@ def test_forgotten(pub, emails):
|
||||||
assert 'a=cxlpw' in confirm_urls[1]
|
assert 'a=cxlpw' in confirm_urls[1]
|
||||||
|
|
||||||
resp = app.get(confirm_urls[0])
|
resp = app.get(confirm_urls[0])
|
||||||
assert 'New password sent by email' in resp.body
|
assert 'New password sent by email' in resp.text
|
||||||
assert emails.get('Your new password')
|
assert emails.get('Your new password')
|
||||||
|
|
||||||
# check new password is working
|
# check new password is working
|
||||||
|
@ -256,7 +259,7 @@ def test_forgotten(pub, emails):
|
||||||
resp = app.get('/ident/password/forgotten')
|
resp = app.get('/ident/password/forgotten')
|
||||||
resp.forms[0]['username'] = 'foo'
|
resp.forms[0]['username'] = 'foo'
|
||||||
resp = resp.forms[0].submit()
|
resp = resp.forms[0].submit()
|
||||||
assert 'A token for changing your password has been emailed to you.' in resp.body
|
assert 'A token for changing your password has been emailed to you.' in resp.text
|
||||||
|
|
||||||
body = emails.get('Change Password Request')['payload']
|
body = emails.get('Change Password Request')['payload']
|
||||||
confirm_urls = re.findall(r'http://.*\w', body)
|
confirm_urls = re.findall(r'http://.*\w', body)
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import cPickle
|
import pickle
|
||||||
|
|
||||||
from utilities import create_temporary_pub, clean_temporary_pub
|
from utilities import create_temporary_pub, clean_temporary_pub
|
||||||
|
|
||||||
|
@ -34,10 +34,10 @@ def test_migrate():
|
||||||
Role.wipe()
|
Role.wipe()
|
||||||
role = Role(name='Hello world')
|
role = Role(name='Hello world')
|
||||||
role.store()
|
role.store()
|
||||||
obj = cPickle.load(open(role.get_object_filename()))
|
obj = pickle.load(open(role.get_object_filename(), 'rb'))
|
||||||
del obj.slug
|
del obj.slug
|
||||||
cPickle.dump(obj, open(role.get_object_filename(), 'w'))
|
pickle.dump(obj, open(role.get_object_filename(), 'wb'))
|
||||||
assert cPickle.load(open(role.get_object_filename())).slug is None
|
assert pickle.load(open(role.get_object_filename(), 'rb')).slug is None
|
||||||
assert Role.get(role.id).slug == 'hello-world'
|
assert Role.get(role.id).slug == 'hello-world'
|
||||||
|
|
||||||
def test_get_user_roles():
|
def test_get_user_roles():
|
||||||
|
|
|
@ -157,15 +157,15 @@ def test_static_directories():
|
||||||
assert get_app(pub).get('/static/xstatic/jquery.js')
|
assert get_app(pub).get('/static/xstatic/jquery.js')
|
||||||
assert get_app(pub).get('/static/xstatic/jquery-ui.js')
|
assert get_app(pub).get('/static/xstatic/jquery-ui.js')
|
||||||
|
|
||||||
assert 'Directory listing denied' in get_app(pub).get('/static/css/').body
|
assert 'Directory listing denied' in get_app(pub).get('/static/css/').text
|
||||||
assert get_app(pub).get('/static/xxx', status=404)
|
assert get_app(pub).get('/static/xxx', status=404)
|
||||||
|
|
||||||
def test_jquery_debug_mode():
|
def test_jquery_debug_mode():
|
||||||
FormDef.wipe()
|
FormDef.wipe()
|
||||||
create_formdef()
|
create_formdef()
|
||||||
resp = get_app(pub).get('/category1/test-formdef-1/')
|
resp = get_app(pub).get('/category1/test-formdef-1/')
|
||||||
assert 'jquery.min.js' in resp.body
|
assert 'jquery.min.js' in resp.text
|
||||||
pub.cfg['debug'] = {'debug_mode': True}
|
pub.cfg['debug'] = {'debug_mode': True}
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
resp = get_app(pub).get('/category1/test-formdef-1/')
|
resp = get_app(pub).get('/category1/test-formdef-1/')
|
||||||
assert 'jquery.js' in resp.body
|
assert 'jquery.js' in resp.text
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
|
|
||||||
from wcs.qommon import x509utils
|
from wcs.qommon import x509utils
|
||||||
|
@ -26,6 +28,6 @@ def test_metadata_generation():
|
||||||
provider_id='provider_id_1')
|
provider_id='provider_id_1')
|
||||||
assert meta != None
|
assert meta != None
|
||||||
content = meta.get_saml2_metadata(pkey, '', True, True)
|
content = meta.get_saml2_metadata(pkey, '', True, True)
|
||||||
assert isinstance(content, str) and content != ''
|
assert isinstance(content, six.string_types) and content != ''
|
||||||
assert 'EntityDescriptor' in content
|
assert 'EntityDescriptor' in content
|
||||||
assert 'SPSSODescriptor' in content
|
assert 'SPSSODescriptor' in content
|
||||||
|
|
|
@ -2,7 +2,6 @@ import datetime
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import shutil
|
import shutil
|
||||||
import urlparse
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import lasso
|
import lasso
|
||||||
|
@ -11,6 +10,7 @@ except ImportError:
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
from quixote import get_session, get_session_manager
|
from quixote import get_session, get_session_manager
|
||||||
from wcs.qommon.http_request import HTTPRequest
|
from wcs.qommon.http_request import HTTPRequest
|
||||||
|
@ -79,17 +79,17 @@ def setup_idps(pub, idp_number=1):
|
||||||
'role': lasso.PROVIDER_ROLE_IDP,
|
'role': lasso.PROVIDER_ROLE_IDP,
|
||||||
}
|
}
|
||||||
filename = pub.cfg['idp'][base_id]['metadata']
|
filename = pub.cfg['idp'][base_id]['metadata']
|
||||||
fd = file(os.path.join(pub.app_dir, filename), 'w')
|
fd = open(os.path.join(pub.app_dir, filename), 'w')
|
||||||
fd.write(metadata)
|
fd.write(metadata)
|
||||||
fd.close()
|
fd.close()
|
||||||
|
|
||||||
filename = pub.cfg['idp'][base_id]['publickey']
|
filename = pub.cfg['idp'][base_id]['publickey']
|
||||||
fd = file(os.path.join(pub.app_dir, filename), 'w')
|
fd = open(os.path.join(pub.app_dir, filename), 'w')
|
||||||
fd.write(idp_publickey)
|
fd.write(idp_publickey)
|
||||||
fd.close()
|
fd.close()
|
||||||
|
|
||||||
filename = pub.cfg['idp'][base_id]['publickey'].replace('public', 'private')
|
filename = pub.cfg['idp'][base_id]['publickey'].replace('public', 'private')
|
||||||
fd = file(os.path.join(pub.app_dir, filename), 'w')
|
fd = open(os.path.join(pub.app_dir, filename), 'w')
|
||||||
fd.write(idp_privatekey)
|
fd.write(idp_privatekey)
|
||||||
fd.close()
|
fd.close()
|
||||||
|
|
||||||
|
|
|
@ -60,7 +60,7 @@ def app(pub):
|
||||||
|
|
||||||
|
|
||||||
def test_session_max_age(pub, user, app):
|
def test_session_max_age(pub, user, app):
|
||||||
with file(os.path.join(pub.app_dir, 'site-options.cfg'), 'w') as cfg:
|
with open(os.path.join(pub.app_dir, 'site-options.cfg'), 'w') as cfg:
|
||||||
cfg.write('''[options]
|
cfg.write('''[options]
|
||||||
session_max_age: 1
|
session_max_age: 1
|
||||||
''')
|
''')
|
||||||
|
@ -109,7 +109,7 @@ def test_sessions_visiting_objects(pub, http_request):
|
||||||
assert len(session1.visiting_objects.keys()) == 1
|
assert len(session1.visiting_objects.keys()) == 1
|
||||||
session1.store()
|
session1.store()
|
||||||
assert len(pub.get_visited_objects()) == 1
|
assert len(pub.get_visited_objects()) == 1
|
||||||
assert pub.get_visited_objects() == ['formdata-foobar-2']
|
assert list(pub.get_visited_objects()) == ['formdata-foobar-2']
|
||||||
|
|
||||||
# check with a second session
|
# check with a second session
|
||||||
session1.mark_visited_object('formdata-foobar-1')
|
session1.mark_visited_object('formdata-foobar-1')
|
||||||
|
@ -129,7 +129,7 @@ def test_sessions_visiting_objects(pub, http_request):
|
||||||
assert len(session1.visiting_objects.keys()) == 1
|
assert len(session1.visiting_objects.keys()) == 1
|
||||||
session1.store()
|
session1.store()
|
||||||
assert len(pub.get_visited_objects()) == 1
|
assert len(pub.get_visited_objects()) == 1
|
||||||
assert pub.get_visited_objects() == ['formdata-foobar-2']
|
assert list(pub.get_visited_objects()) == ['formdata-foobar-2']
|
||||||
|
|
||||||
# check with a second session
|
# check with a second session
|
||||||
session2 = pub.session_class(id='session2')
|
session2 = pub.session_class(id='session2')
|
||||||
|
@ -143,7 +143,7 @@ def test_sessions_visiting_objects(pub, http_request):
|
||||||
session2.store()
|
session2.store()
|
||||||
assert len(pub.get_visited_objects()) == 2
|
assert len(pub.get_visited_objects()) == 2
|
||||||
|
|
||||||
assert pub.get_visited_objects(exclude_user='BAR') == ['formdata-foobar-2']
|
assert list(pub.get_visited_objects(exclude_user='BAR')) == ['formdata-foobar-2']
|
||||||
|
|
||||||
# check visitors
|
# check visitors
|
||||||
assert set([x[0] for x in pub.get_object_visitors('formdata-foobar-2')]) == set(['FOO', 'BAR'])
|
assert set([x[0] for x in pub.get_object_visitors('formdata-foobar-2')]) == set(['FOO', 'BAR'])
|
||||||
|
@ -174,13 +174,13 @@ def test_session_substitution_variables(pub, user, app):
|
||||||
resp = app.get('/foobar/')
|
resp = app.get('/foobar/')
|
||||||
assert pub.session_manager.session_class.count() == 1
|
assert pub.session_manager.session_class.count() == 1
|
||||||
session_id = pub.session_manager.session_class.select()[0].id
|
session_id = pub.session_manager.session_class.select()[0].id
|
||||||
assert 'Hello %s' % session_id in resp.body
|
assert 'Hello %s' % session_id in resp.text
|
||||||
|
|
||||||
login(app, username='foo', password='foo')
|
login(app, username='foo', password='foo')
|
||||||
assert pub.session_manager.session_class.count() == 2
|
assert pub.session_manager.session_class.count() == 2
|
||||||
session_id = [x for x in pub.session_manager.session_class.select() if x.id != session_id][0].id
|
session_id = [x for x in pub.session_manager.session_class.select() if x.id != session_id][0].id
|
||||||
resp = app.get('/foobar/')
|
resp = app.get('/foobar/')
|
||||||
assert 'Hello %s' % session_id in resp.body
|
assert 'Hello %s' % session_id in resp.text
|
||||||
|
|
||||||
def test_session_substitution_variables_1st_page_condition(pub, user, app):
|
def test_session_substitution_variables_1st_page_condition(pub, user, app):
|
||||||
pub.session_manager.session_class.wipe()
|
pub.session_manager.session_class.wipe()
|
||||||
|
@ -198,4 +198,4 @@ def test_session_substitution_variables_1st_page_condition(pub, user, app):
|
||||||
resp = app.get('/foobar/')
|
resp = app.get('/foobar/')
|
||||||
assert pub.session_manager.session_class.count() == 1
|
assert pub.session_manager.session_class.count() == 1
|
||||||
session_id = pub.session_manager.session_class.select()[0].id
|
session_id = pub.session_manager.session_class.select()[0].id
|
||||||
assert 'COM1' in resp.body
|
assert 'COM1' in resp.text
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
|
@ -12,6 +14,7 @@ from django.core.management import call_command
|
||||||
|
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
|
|
||||||
|
from wcs.qommon import force_str
|
||||||
from wcs import formdef, publisher, fields
|
from wcs import formdef, publisher, fields
|
||||||
from wcs.formdef import FormDef
|
from wcs.formdef import FormDef
|
||||||
from wcs.formdata import Evolution
|
from wcs.formdata import Evolution
|
||||||
|
@ -762,11 +765,11 @@ def test_select_limit_offset():
|
||||||
assert len(data_class.select()) == 50
|
assert len(data_class.select()) == 50
|
||||||
for iterator in (False, True):
|
for iterator in (False, True):
|
||||||
for func_clause in (lambda x: True, None):
|
for func_clause in (lambda x: True, None):
|
||||||
assert [x.id for x in data_class.select(func_clause, order_by='id', iterator=iterator)] == range(1, 51)
|
assert [x.id for x in data_class.select(func_clause, order_by='id', iterator=iterator)] == list(range(1, 51))
|
||||||
assert [x.id for x in data_class.select(func_clause, order_by='id', limit=10, iterator=iterator)] == range(1, 11)
|
assert [x.id for x in data_class.select(func_clause, order_by='id', limit=10, iterator=iterator)] == list(range(1, 11))
|
||||||
assert [x.id for x in data_class.select(func_clause, order_by='id', limit=10, offset=10, iterator=iterator)] == range(11, 21)
|
assert [x.id for x in data_class.select(func_clause, order_by='id', limit=10, offset=10, iterator=iterator)] == list(range(11, 21))
|
||||||
assert [x.id for x in data_class.select(func_clause, order_by='id', limit=20, offset=20, iterator=iterator)] == range(21, 41)
|
assert [x.id for x in data_class.select(func_clause, order_by='id', limit=20, offset=20, iterator=iterator)] == list(range(21, 41))
|
||||||
assert [x.id for x in data_class.select(func_clause, order_by='id', offset=10, iterator=iterator)] == range(11, 51)
|
assert [x.id for x in data_class.select(func_clause, order_by='id', offset=10, iterator=iterator)] == list(range(11, 51))
|
||||||
assert len([x.id for x in data_class.select(lambda x: x.id > 10, limit=10, iterator=iterator)]) == 10
|
assert len([x.id for x in data_class.select(lambda x: x.id > 10, limit=10, iterator=iterator)]) == 10
|
||||||
|
|
||||||
@postgresql
|
@postgresql
|
||||||
|
@ -826,11 +829,11 @@ def test_select_criteria_or_and():
|
||||||
t = data_class()
|
t = data_class()
|
||||||
t.store()
|
t.store()
|
||||||
|
|
||||||
assert [x.id for x in data_class.select([st.Or([st.Less('id', 10)])], order_by='id')] == range(1, 10)
|
assert [x.id for x in data_class.select([st.Or([st.Less('id', 10)])], order_by='id')] == list(range(1, 10))
|
||||||
assert [x.id for x in data_class.select([st.Or([
|
assert [x.id for x in data_class.select([st.Or([
|
||||||
st.Less('id', 10), st.Equal('id', 15)])], order_by='id')] == range(1, 10) + [15]
|
st.Less('id', 10), st.Equal('id', 15)])], order_by='id')] == list(range(1, 10)) + [15]
|
||||||
assert [x.id for x in data_class.select([st.And([
|
assert [x.id for x in data_class.select([st.And([
|
||||||
st.Less('id', 10), st.Greater('id', 5)])], order_by='id')] == range(6, 10)
|
st.Less('id', 10), st.Greater('id', 5)])], order_by='id')] == list(range(6, 10))
|
||||||
|
|
||||||
@postgresql
|
@postgresql
|
||||||
def test_select_criteria_null():
|
def test_select_criteria_null():
|
||||||
|
@ -895,8 +898,8 @@ def test_sql_criteria_ilike():
|
||||||
assert data_class.count() == 50
|
assert data_class.count() == 50
|
||||||
assert len(data_class.select()) == 50
|
assert len(data_class.select()) == 50
|
||||||
|
|
||||||
assert [x.id for x in data_class.select([st.ILike('f3', 'bar')], order_by='id')] == range(21, 51)
|
assert [x.id for x in data_class.select([st.ILike('f3', 'bar')], order_by='id')] == list(range(21, 51))
|
||||||
assert [x.id for x in data_class.select([st.ILike('f3', 'BAR')], order_by='id')] == range(21, 51)
|
assert [x.id for x in data_class.select([st.ILike('f3', 'BAR')], order_by='id')] == list(range(21, 51))
|
||||||
|
|
||||||
@postgresql
|
@postgresql
|
||||||
def test_sql_criteria_fts():
|
def test_sql_criteria_fts():
|
||||||
|
@ -919,7 +922,7 @@ def test_sql_criteria_fts():
|
||||||
assert data_class.count() == 50
|
assert data_class.count() == 50
|
||||||
assert len(data_class.select()) == 50
|
assert len(data_class.select()) == 50
|
||||||
|
|
||||||
assert [x.id for x in data_class.select([st.FtsMatch('BAR')], order_by='id')] == range(21, 51)
|
assert [x.id for x in data_class.select([st.FtsMatch('BAR')], order_by='id')] == list(range(21, 51))
|
||||||
|
|
||||||
# check fts against data in history
|
# check fts against data in history
|
||||||
assert len(data_class.select([st.FtsMatch('XXX')])) == 0
|
assert len(data_class.select([st.FtsMatch('XXX')])) == 0
|
||||||
|
@ -949,7 +952,7 @@ def test_sql_criteria_fts():
|
||||||
|
|
||||||
# check unaccent
|
# check unaccent
|
||||||
user = sql.SqlUser()
|
user = sql.SqlUser()
|
||||||
user.name = u'Frédéric'.encode('utf-8')
|
user.name = force_str(u'Frédéric')
|
||||||
user.store()
|
user.store()
|
||||||
t.user_id = user.id
|
t.user_id = user.id
|
||||||
t.store()
|
t.store()
|
||||||
|
@ -1760,7 +1763,7 @@ def test_view_performances():
|
||||||
formdef.store()
|
formdef.store()
|
||||||
formdefs.append(formdef)
|
formdefs.append(formdef)
|
||||||
|
|
||||||
print 'create formdatas'
|
print('create formdatas')
|
||||||
# create formdatas
|
# create formdatas
|
||||||
for i in range(nb_formdatas):
|
for i in range(nb_formdatas):
|
||||||
data_class = random.choice(formdefs).data_class()
|
data_class = random.choice(formdefs).data_class()
|
||||||
|
@ -1775,7 +1778,7 @@ def test_view_performances():
|
||||||
formdata.jump_status('st%s' % (j+2))
|
formdata.jump_status('st%s' % (j+2))
|
||||||
if random.random() < 0.5:
|
if random.random() < 0.5:
|
||||||
break
|
break
|
||||||
print 'done'
|
print('done')
|
||||||
|
|
||||||
t0 = time.time()
|
t0 = time.time()
|
||||||
user_roles = [random.choice(roles).id, random.choice(roles).id]
|
user_roles = [random.choice(roles).id, random.choice(roles).id]
|
||||||
|
@ -1784,7 +1787,7 @@ def test_view_performances():
|
||||||
criterias.append(st.Equal('is_at_endpoint', False))
|
criterias.append(st.Equal('is_at_endpoint', False))
|
||||||
criterias.append(st.Intersects('actions_roles_array', user_roles))
|
criterias.append(st.Intersects('actions_roles_array', user_roles))
|
||||||
formdatas = sql.AnyFormData.select(criterias, order_by='receipt_time', limit=20, offset=0)
|
formdatas = sql.AnyFormData.select(criterias, order_by='receipt_time', limit=20, offset=0)
|
||||||
print time.time() - t0
|
print(time.time() - t0)
|
||||||
assert (time.time() - t0) < 0.5
|
assert (time.time() - t0) < 0.5
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -217,10 +217,10 @@ def test_select_order_by():
|
||||||
test.unique_value = 51-x
|
test.unique_value = 51-x
|
||||||
test.store()
|
test.store()
|
||||||
|
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='id')] == range(1, 51)
|
assert [int(x.id) for x in Foobar.select(order_by='id')] == list(range(1, 51))
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='-id')] == range(50, 0, -1)
|
assert [int(x.id) for x in Foobar.select(order_by='-id')] == list(range(50, 0, -1))
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='unique_value')] == range(50, 0, -1)
|
assert [int(x.id) for x in Foobar.select(order_by='unique_value')] == list(range(50, 0, -1))
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='-unique_value')] == range(1, 51)
|
assert [int(x.id) for x in Foobar.select(order_by='-unique_value')] == list(range(1, 51))
|
||||||
|
|
||||||
def test_select_datetime():
|
def test_select_datetime():
|
||||||
Foobar.wipe()
|
Foobar.wipe()
|
||||||
|
@ -249,10 +249,10 @@ def test_select_limit_offset():
|
||||||
test.store()
|
test.store()
|
||||||
|
|
||||||
assert len(Foobar.select()) == 50
|
assert len(Foobar.select()) == 50
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='id', limit=10)] == range(1, 11)
|
assert [int(x.id) for x in Foobar.select(order_by='id', limit=10)] == list(range(1, 11))
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='id', limit=10, offset=10)] == range(11, 21)
|
assert [int(x.id) for x in Foobar.select(order_by='id', limit=10, offset=10)] == list(range(11, 21))
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='id', limit=20, offset=20)] == range(21, 41)
|
assert [int(x.id) for x in Foobar.select(order_by='id', limit=20, offset=20)] == list(range(21, 41))
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='id', offset=10)] == range(11, 51)
|
assert [int(x.id) for x in Foobar.select(order_by='id', offset=10)] == list(range(11, 51))
|
||||||
|
|
||||||
|
|
||||||
def test_select_criteria_overlaps():
|
def test_select_criteria_overlaps():
|
||||||
|
@ -298,11 +298,11 @@ def test_select_criteria_or_and():
|
||||||
|
|
||||||
assert len(Foobar.select()) == 50
|
assert len(Foobar.select()) == 50
|
||||||
|
|
||||||
assert [int(x.id) for x in Foobar.select([st.Or([st.Less('value', 10)])], order_by='id')] == range(1, 10)
|
assert [int(x.id) for x in Foobar.select([st.Or([st.Less('value', 10)])], order_by='id')] == list(range(1, 10))
|
||||||
assert [int(x.id) for x in Foobar.select([st.Or([
|
assert [int(x.id) for x in Foobar.select([st.Or([
|
||||||
st.Less('value', 10), st.Equal('value', 15)])], order_by='value')] == range(1, 10) + [15]
|
st.Less('value', 10), st.Equal('value', 15)])], order_by='value')] == list(range(1, 10)) + [15]
|
||||||
assert [int(x.id) for x in Foobar.select([st.And([st.Less('value', 10),
|
assert [int(x.id) for x in Foobar.select([st.And([st.Less('value', 10),
|
||||||
st.Greater('value', 5)])], order_by='id')] == range(6, 10)
|
st.Greater('value', 5)])], order_by='id')] == list(range(6, 10))
|
||||||
|
|
||||||
def test_select_criteria_null():
|
def test_select_criteria_null():
|
||||||
Foobar.wipe()
|
Foobar.wipe()
|
||||||
|
@ -336,8 +336,8 @@ def test_select_criteria_ilike():
|
||||||
|
|
||||||
assert len(Foobar.select()) == 50
|
assert len(Foobar.select()) == 50
|
||||||
|
|
||||||
assert [int(x.id) for x in Foobar.select([st.ILike('foo', 'bar')], order_by='id')] == range(21, 50)
|
assert [int(x.id) for x in Foobar.select([st.ILike('foo', 'bar')], order_by='id')] == list(range(21, 50))
|
||||||
assert [int(x.id) for x in Foobar.select([st.ILike('foo', 'BAR')], order_by='id')] == range(21, 50)
|
assert [int(x.id) for x in Foobar.select([st.ILike('foo', 'BAR')], order_by='id')] == list(range(21, 50))
|
||||||
|
|
||||||
def test_store_async():
|
def test_store_async():
|
||||||
Foobar.wipe()
|
Foobar.wipe()
|
||||||
|
@ -366,7 +366,7 @@ def test_items():
|
||||||
test = Foobar()
|
test = Foobar()
|
||||||
test.store()
|
test.store()
|
||||||
|
|
||||||
assert sorted([(int(x), int(y.id)) for (x, y) in Foobar.items()]) == zip(range(1,51), range(1, 51))
|
assert sorted([(int(x), int(y.id)) for (x, y) in Foobar.items()]) == list(zip(range(1,51), range(1, 51)))
|
||||||
|
|
||||||
def test_reversed_order():
|
def test_reversed_order():
|
||||||
Foobar.wipe()
|
Foobar.wipe()
|
||||||
|
@ -376,7 +376,7 @@ def test_reversed_order():
|
||||||
test.store()
|
test.store()
|
||||||
|
|
||||||
assert len(Foobar.select()) == 50
|
assert len(Foobar.select()) == 50
|
||||||
assert [int(x.id) for x in Foobar.select(order_by='-id', limit=10)] == range(50, 40, -1)
|
assert [int(x.id) for x in Foobar.select(order_by='-id', limit=10)] == list(range(50, 40, -1))
|
||||||
|
|
||||||
def test_destroy_rebuild_index():
|
def test_destroy_rebuild_index():
|
||||||
test_get_with_indexed_value()
|
test_get_with_indexed_value()
|
||||||
|
|
|
@ -532,4 +532,6 @@ def test_reproj():
|
||||||
|
|
||||||
lazy_formdata = LazyFormData(MockFormData())
|
lazy_formdata = LazyFormData(MockFormData())
|
||||||
tmpl = Template('{% with form_geoloc_base|reproj:"EPSG:3946" as c %}{{c.0}}/{{c.1}}{% endwith %}')
|
tmpl = Template('{% with form_geoloc_base|reproj:"EPSG:3946" as c %}{{c.0}}/{{c.1}}{% endwith %}')
|
||||||
assert tmpl.render(CompatibilityNamesDict({'form': lazy_formdata})) == '1625337.15483/5422836.71627'
|
coords = tmpl.render(CompatibilityNamesDict({'form': lazy_formdata})).split('/')
|
||||||
|
assert int(float(coords[0])) == 1625337
|
||||||
|
assert int(float(coords[1])) == 5422836
|
||||||
|
|
|
@ -3,9 +3,9 @@
|
||||||
import pytest
|
import pytest
|
||||||
import sys
|
import sys
|
||||||
import shutil
|
import shutil
|
||||||
import StringIO
|
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
|
from django.utils.six import BytesIO
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
from wcs import publisher
|
from wcs import publisher
|
||||||
|
|
||||||
|
@ -167,15 +167,15 @@ def test_status_actions_named_existing_role(pub):
|
||||||
commentable.parent = st1
|
commentable.parent = st1
|
||||||
|
|
||||||
wf2 = assert_import_export_works(wf)
|
wf2 = assert_import_export_works(wf)
|
||||||
assert '<item role_id="2">Test Role named existing role</item>' in ET.tostring(indent(wf.export_to_xml()))
|
assert b'<item role_id="2">Test Role named existing role</item>' in ET.tostring(indent(wf.export_to_xml()))
|
||||||
assert wf2.possible_status[0].items[0].by == ['2']
|
assert wf2.possible_status[0].items[0].by == ['2']
|
||||||
|
|
||||||
# check that it works even if the role_id is not set
|
# check that it works even if the role_id is not set
|
||||||
xml_export_orig = ET.tostring(export_to_indented_xml(wf))
|
xml_export_orig = ET.tostring(export_to_indented_xml(wf))
|
||||||
xml_export = xml_export_orig.replace(
|
xml_export = xml_export_orig.replace(
|
||||||
'<item role_id="2">Test Role named existing role</item>',
|
b'<item role_id="2">Test Role named existing role</item>',
|
||||||
'<item>Test Role named existing role</item>')
|
b'<item>Test Role named existing role</item>')
|
||||||
wf3 = Workflow.import_from_xml_tree(ET.parse(StringIO.StringIO(xml_export)))
|
wf3 = Workflow.import_from_xml_tree(ET.parse(BytesIO(xml_export)))
|
||||||
assert wf3.possible_status[0].items[0].by == ['2']
|
assert wf3.possible_status[0].items[0].by == ['2']
|
||||||
|
|
||||||
|
|
||||||
|
@ -204,30 +204,30 @@ def test_status_actions_named_missing_role(pub):
|
||||||
|
|
||||||
# check that role name has precedence over id
|
# check that role name has precedence over id
|
||||||
xml_export_orig = ET.tostring(export_to_indented_xml(wf))
|
xml_export_orig = ET.tostring(export_to_indented_xml(wf))
|
||||||
assert '<item role_id="3">Test Role A</item>' in xml_export_orig
|
assert b'<item role_id="3">Test Role A</item>' in xml_export_orig
|
||||||
xml_export = xml_export_orig.replace('<item role_id="3">Test Role A</item>',
|
xml_export = xml_export_orig.replace(b'<item role_id="3">Test Role A</item>',
|
||||||
'<item role_id="4">Test Role A</item>')
|
b'<item role_id="4">Test Role A</item>')
|
||||||
wf3 = Workflow.import_from_xml_tree(ET.parse(StringIO.StringIO(xml_export)))
|
wf3 = Workflow.import_from_xml_tree(ET.parse(BytesIO(xml_export)))
|
||||||
assert wf3.possible_status[0].items[0].by == ['3']
|
assert wf3.possible_status[0].items[0].by == ['3']
|
||||||
|
|
||||||
# check that it creates a new role if there's no match on id and name
|
# check that it creates a new role if there's no match on id and name
|
||||||
xml_export = xml_export_orig.replace('<item role_id="3">Test Role A</item>',
|
xml_export = xml_export_orig.replace(b'<item role_id="3">Test Role A</item>',
|
||||||
'<item role_id="999">foobar</item>')
|
b'<item role_id="999">foobar</item>')
|
||||||
nb_roles = Role.count()
|
nb_roles = Role.count()
|
||||||
wf3 = Workflow.import_from_xml_tree(ET.parse(StringIO.StringIO(xml_export)))
|
wf3 = Workflow.import_from_xml_tree(ET.parse(BytesIO(xml_export)))
|
||||||
assert Role.count() == nb_roles+1
|
assert Role.count() == nb_roles+1
|
||||||
|
|
||||||
# check that it doesn't fallback on the id if there's no match on the
|
# check that it doesn't fallback on the id if there's no match on the
|
||||||
# name
|
# name
|
||||||
nb_roles = Role.count()
|
nb_roles = Role.count()
|
||||||
xml_export = xml_export_orig.replace('<item role_id="3">Test Role A</item>',
|
xml_export = xml_export_orig.replace(b'<item role_id="3">Test Role A</item>',
|
||||||
'<item role_id="3">Test Role C</item>')
|
b'<item role_id="3">Test Role C</item>')
|
||||||
wf3 = Workflow.import_from_xml_tree(ET.parse(StringIO.StringIO(xml_export)))
|
wf3 = Workflow.import_from_xml_tree(ET.parse(BytesIO(xml_export)))
|
||||||
assert wf3.possible_status[0].items[0].by != ['3']
|
assert wf3.possible_status[0].items[0].by != ['3']
|
||||||
assert Role.count() == nb_roles+1
|
assert Role.count() == nb_roles+1
|
||||||
|
|
||||||
# on the other hand, check that it uses the id when included_id is True
|
# on the other hand, check that it uses the id when included_id is True
|
||||||
wf3 = Workflow.import_from_xml_tree(ET.parse(StringIO.StringIO(xml_export)),
|
wf3 = Workflow.import_from_xml_tree(ET.parse(BytesIO(xml_export)),
|
||||||
include_id=True)
|
include_id=True)
|
||||||
assert wf3.possible_status[0].items[0].by == ['3']
|
assert wf3.possible_status[0].items[0].by == ['3']
|
||||||
|
|
||||||
|
@ -263,8 +263,8 @@ def test_export_to_model_action(pub):
|
||||||
export_to = ExportToModel()
|
export_to = ExportToModel()
|
||||||
export_to.label = 'test'
|
export_to.label = 'test'
|
||||||
upload = Upload('/foo/bar', content_type='application/vnd.oasis.opendocument.text')
|
upload = Upload('/foo/bar', content_type='application/vnd.oasis.opendocument.text')
|
||||||
file_content = '''PK\x03\x04\x14\x00\x00\x08\x00\x00\'l\x8eG^\xc62\x0c\'\x00'''
|
file_content = b'''PK\x03\x04\x14\x00\x00\x08\x00\x00\'l\x8eG^\xc62\x0c\'\x00'''
|
||||||
upload.fp = StringIO.StringIO()
|
upload.fp = BytesIO()
|
||||||
upload.fp.write(file_content)
|
upload.fp.write(file_content)
|
||||||
upload.fp.seek(0)
|
upload.fp.seek(0)
|
||||||
export_to.model_file = UploadedFile(pub.APP_DIR, None, upload)
|
export_to.model_file = UploadedFile(pub.APP_DIR, None, upload)
|
||||||
|
@ -280,8 +280,8 @@ def test_export_to_model_action(pub):
|
||||||
export_to = ExportToModel()
|
export_to = ExportToModel()
|
||||||
export_to.label = 'test'
|
export_to.label = 'test'
|
||||||
upload = Upload('/foo/bar', content_type='text/rtf')
|
upload = Upload('/foo/bar', content_type='text/rtf')
|
||||||
file_content = ''
|
file_content = b''
|
||||||
upload.fp = StringIO.StringIO()
|
upload.fp = BytesIO()
|
||||||
upload.fp.write(file_content)
|
upload.fp.write(file_content)
|
||||||
upload.fp.seek(0)
|
upload.fp.seek(0)
|
||||||
export_to.model_file = UploadedFile(pub.APP_DIR, None, upload)
|
export_to.model_file = UploadedFile(pub.APP_DIR, None, upload)
|
||||||
|
@ -364,10 +364,10 @@ def test_commentable_action(pub):
|
||||||
|
|
||||||
# import legacy comment without required attribute
|
# import legacy comment without required attribute
|
||||||
xml_export = ET.tostring(export_to_indented_xml(wf))
|
xml_export = ET.tostring(export_to_indented_xml(wf))
|
||||||
assert '<required>True</required>' in xml_export
|
assert b'<required>True</required>' in xml_export
|
||||||
xml_export = xml_export.replace('<required>True</required>', '')
|
xml_export = xml_export.replace(b'<required>True</required>', b'')
|
||||||
assert '<required>True</required>' not in xml_export
|
assert b'<required>True</required>' not in xml_export
|
||||||
wf2 = Workflow.import_from_xml_tree(ET.parse(StringIO.StringIO(xml_export)))
|
wf2 = Workflow.import_from_xml_tree(ET.parse(BytesIO(xml_export)))
|
||||||
assert wf2.possible_status[0].items[0].required is False
|
assert wf2.possible_status[0].items[0].required is False
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -4,13 +4,14 @@ import os
|
||||||
import pytest
|
import pytest
|
||||||
import shutil
|
import shutil
|
||||||
import time
|
import time
|
||||||
import urllib2
|
|
||||||
import urlparse
|
|
||||||
import zipfile
|
import zipfile
|
||||||
|
|
||||||
import mock
|
import mock
|
||||||
|
|
||||||
from django.utils.six import StringIO
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_bytes
|
||||||
|
from django.utils.six import BytesIO, StringIO
|
||||||
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
|
|
||||||
from quixote import cleanup, get_response
|
from quixote import cleanup, get_response
|
||||||
from wcs.qommon.errors import ConnectionError
|
from wcs.qommon.errors import ConnectionError
|
||||||
|
@ -67,7 +68,7 @@ def pub(request):
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
req = HTTPRequest(None, {'SERVER_NAME': 'example.net', 'SCRIPT_NAME': ''})
|
req = HTTPRequest(None, {'SERVER_NAME': 'example.net', 'SCRIPT_NAME': ''})
|
||||||
req.response.filter = {}
|
req.response.filter = {}
|
||||||
req.user = None
|
req._user = None
|
||||||
pub._set_request(req)
|
pub._set_request(req)
|
||||||
req.session = sessions.BasicSession(id=1)
|
req.session = sessions.BasicSession(id=1)
|
||||||
pub.set_config(req)
|
pub.set_config(req)
|
||||||
|
@ -80,7 +81,7 @@ def two_pubs(request):
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
req = HTTPRequest(None, {'SERVER_NAME': 'example.net', 'SCRIPT_NAME': ''})
|
req = HTTPRequest(None, {'SERVER_NAME': 'example.net', 'SCRIPT_NAME': ''})
|
||||||
req.response.filter = {}
|
req.response.filter = {}
|
||||||
req.user = None
|
req._user = None
|
||||||
pub._set_request(req)
|
pub._set_request(req)
|
||||||
req.session = sessions.BasicSession(id=1)
|
req.session = sessions.BasicSession(id=1)
|
||||||
pub.set_config(req)
|
pub.set_config(req)
|
||||||
|
@ -877,7 +878,7 @@ def test_register_comment_attachment(pub):
|
||||||
shutil.rmtree(os.path.join(get_publisher().app_dir, 'attachments'))
|
shutil.rmtree(os.path.join(get_publisher().app_dir, 'attachments'))
|
||||||
|
|
||||||
formdata.evolution[-1].parts = [AttachmentEvolutionPart('hello.txt',
|
formdata.evolution[-1].parts = [AttachmentEvolutionPart('hello.txt',
|
||||||
fp=StringIO('hello world'), varname='testfile')]
|
fp=BytesIO(b'hello world'), varname='testfile')]
|
||||||
formdata.store()
|
formdata.store()
|
||||||
assert len(os.listdir(os.path.join(get_publisher().app_dir, 'attachments'))) == 1
|
assert len(os.listdir(os.path.join(get_publisher().app_dir, 'attachments'))) == 1
|
||||||
for subdir in os.listdir(os.path.join(get_publisher().app_dir, 'attachments')):
|
for subdir in os.listdir(os.path.join(get_publisher().app_dir, 'attachments')):
|
||||||
|
@ -937,7 +938,7 @@ def test_register_comment_with_attachment_file(pub):
|
||||||
wf.store()
|
wf.store()
|
||||||
|
|
||||||
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
||||||
jpg = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg')).read()
|
jpg = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb').read()
|
||||||
upload.receive([jpg])
|
upload.receive([jpg])
|
||||||
|
|
||||||
formdef = FormDef()
|
formdef = FormDef()
|
||||||
|
@ -992,9 +993,9 @@ def test_register_comment_with_attachment_file(pub):
|
||||||
|
|
||||||
ws_response_varname = 'ws_response_afile'
|
ws_response_varname = 'ws_response_afile'
|
||||||
wf_data = {
|
wf_data = {
|
||||||
'%s_filename' % ws_response_varname : 'hello.txt',
|
'%s_filename' % ws_response_varname: 'hello.txt',
|
||||||
'%s_content_type' % ws_response_varname : 'text/plain',
|
'%s_content_type' % ws_response_varname: 'text/plain',
|
||||||
'%s_b64_content' % ws_response_varname : base64.encodestring('hello world'),
|
'%s_b64_content' % ws_response_varname: base64.encodestring(b'hello world'),
|
||||||
}
|
}
|
||||||
formdata.update_workflow_data(wf_data)
|
formdata.update_workflow_data(wf_data)
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
@ -1260,7 +1261,7 @@ def test_email_attachments(pub, emails):
|
||||||
formdef.store()
|
formdef.store()
|
||||||
|
|
||||||
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
||||||
jpg = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg')).read()
|
jpg = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb').read()
|
||||||
upload.receive([jpg])
|
upload.receive([jpg])
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
formdata.data = {'3': upload}
|
formdata.data = {'3': upload}
|
||||||
|
@ -1369,7 +1370,10 @@ def test_email_attachments(pub, emails):
|
||||||
assert emails.emails['foobar']['msg'].get_payload()[0].get_content_type() == 'text/html'
|
assert emails.emails['foobar']['msg'].get_payload()[0].get_content_type() == 'text/html'
|
||||||
assert emails.emails['foobar']['msg'].get_payload()[1].get_content_type() == 'image/jpeg'
|
assert emails.emails['foobar']['msg'].get_payload()[1].get_content_type() == 'image/jpeg'
|
||||||
assert emails.emails['foobar']['msg'].get_payload()[2].get_content_type() == 'text/plain'
|
assert emails.emails['foobar']['msg'].get_payload()[2].get_content_type() == 'text/plain'
|
||||||
assert emails.emails['foobar']['msg'].get_payload()[2].get_payload() == 'blah'
|
if six.PY2:
|
||||||
|
assert emails.emails['foobar']['msg'].get_payload()[2].get_payload() == 'blah'
|
||||||
|
else:
|
||||||
|
assert base64.decodestring(force_bytes(emails.emails['foobar']['msg'].get_payload()[2].get_payload())) == b'blah'
|
||||||
assert len(emails.emails['foobar']['msg'].get_payload()) == 3
|
assert len(emails.emails['foobar']['msg'].get_payload()) == 3
|
||||||
|
|
||||||
emails.empty()
|
emails.empty()
|
||||||
|
@ -1387,8 +1391,8 @@ def test_email_attachments(pub, emails):
|
||||||
assert emails.emails['foobar']['msg'].get_payload(2).get_content_type() == 'application/json'
|
assert emails.emails['foobar']['msg'].get_payload(2).get_content_type() == 'application/json'
|
||||||
payload1 = emails.emails['foobar']['msg'].get_payload(1)
|
payload1 = emails.emails['foobar']['msg'].get_payload(1)
|
||||||
payload2 = emails.emails['foobar']['msg'].get_payload(2)
|
payload2 = emails.emails['foobar']['msg'].get_payload(2)
|
||||||
assert payload1.get_payload(decode=True) == "Hello world"
|
assert payload1.get_payload(decode=True) == b"Hello world"
|
||||||
assert json.loads(payload2.get_payload(decode=True)) == {'hello': 'world'}
|
assert json.loads(force_text(payload2.get_payload(decode=True))) == {'hello': 'world'}
|
||||||
|
|
||||||
|
|
||||||
def test_webservice_call(http_requests, pub):
|
def test_webservice_call(http_requests, pub):
|
||||||
|
@ -1597,7 +1601,10 @@ def test_webservice_call(http_requests, pub):
|
||||||
item.record_errors = True
|
item.record_errors = True
|
||||||
with pytest.raises(AbortActionException):
|
with pytest.raises(AbortActionException):
|
||||||
item.perform(formdata)
|
item.perform(formdata)
|
||||||
assert formdata.evolution[-1].parts[-1].summary == 'ValueError: No JSON object could be decoded\n'
|
if six.PY2:
|
||||||
|
assert formdata.evolution[-1].parts[-1].summary == 'ValueError: No JSON object could be decoded\n'
|
||||||
|
else:
|
||||||
|
assert formdata.evolution[-1].parts[-1].summary == 'json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)\n'
|
||||||
assert formdata.workflow_data.get('xxx_status') == 200
|
assert formdata.workflow_data.get('xxx_status') == 200
|
||||||
assert formdata.workflow_data.get('xxx_time')
|
assert formdata.workflow_data.get('xxx_time')
|
||||||
assert 'xxx_error_response' not in formdata.workflow_data
|
assert 'xxx_error_response' not in formdata.workflow_data
|
||||||
|
@ -1617,7 +1624,7 @@ def test_webservice_call(http_requests, pub):
|
||||||
assert attachment.base_filename == 'xxx.xml'
|
assert attachment.base_filename == 'xxx.xml'
|
||||||
assert attachment.content_type == 'text/xml'
|
assert attachment.content_type == 'text/xml'
|
||||||
attachment.fp.seek(0)
|
attachment.fp.seek(0)
|
||||||
assert attachment.fp.read(5) == '<?xml'
|
assert attachment.fp.read(5) == b'<?xml'
|
||||||
formdata.workflow_data = None
|
formdata.workflow_data = None
|
||||||
|
|
||||||
item = WebserviceCallStatusItem()
|
item = WebserviceCallStatusItem()
|
||||||
|
@ -1934,7 +1941,7 @@ def test_webservice_call_error_handling(http_requests, pub):
|
||||||
item.record_errors = True
|
item.record_errors = True
|
||||||
item.action_on_network_errors = ':pass'
|
item.action_on_network_errors = ':pass'
|
||||||
item.perform(formdata)
|
item.perform(formdata)
|
||||||
assert formdata.evolution[-1].parts[-1].summary == 'ConnectionError: error\n'
|
assert 'ConnectionError: error\n' in formdata.evolution[-1].parts[-1].summary
|
||||||
assert formdata.workflow_data['plop_connection_error'] == 'error'
|
assert formdata.workflow_data['plop_connection_error'] == 'error'
|
||||||
|
|
||||||
def test_webservice_call_store_in_backoffice_filefield(http_requests, pub):
|
def test_webservice_call_store_in_backoffice_filefield(http_requests, pub):
|
||||||
|
@ -1971,7 +1978,7 @@ def test_webservice_call_store_in_backoffice_filefield(http_requests, pub):
|
||||||
fbo1 = formdata.data['bo1']
|
fbo1 = formdata.data['bo1']
|
||||||
assert fbo1.base_filename == 'file-bo1.xml'
|
assert fbo1.base_filename == 'file-bo1.xml'
|
||||||
assert fbo1.content_type == 'text/xml'
|
assert fbo1.content_type == 'text/xml'
|
||||||
assert fbo1.get_content().startswith('<?xml')
|
assert fbo1.get_content().startswith(b'<?xml')
|
||||||
# nothing else is stored
|
# nothing else is stored
|
||||||
assert formdata.workflow_data is None
|
assert formdata.workflow_data is None
|
||||||
assert not formdata.evolution[-1].parts
|
assert not formdata.evolution[-1].parts
|
||||||
|
@ -1988,7 +1995,7 @@ def test_webservice_call_store_in_backoffice_filefield(http_requests, pub):
|
||||||
fbo1 = formdata.data['bo1']
|
fbo1 = formdata.data['bo1']
|
||||||
assert fbo1.base_filename == 'xxx.xml'
|
assert fbo1.base_filename == 'xxx.xml'
|
||||||
assert fbo1.content_type == 'text/xml'
|
assert fbo1.content_type == 'text/xml'
|
||||||
assert fbo1.get_content().startswith('<?xml')
|
assert fbo1.get_content().startswith(b'<?xml')
|
||||||
# varname => workflow_data and AttachmentEvolutionPart
|
# varname => workflow_data and AttachmentEvolutionPart
|
||||||
assert formdata.workflow_data.get('xxx_status') == 200
|
assert formdata.workflow_data.get('xxx_status') == 200
|
||||||
assert formdata.workflow_data.get('xxx_content_type') == 'text/xml'
|
assert formdata.workflow_data.get('xxx_content_type') == 'text/xml'
|
||||||
|
@ -1997,7 +2004,7 @@ def test_webservice_call_store_in_backoffice_filefield(http_requests, pub):
|
||||||
assert attachment.base_filename == 'xxx.xml'
|
assert attachment.base_filename == 'xxx.xml'
|
||||||
assert attachment.content_type == 'text/xml'
|
assert attachment.content_type == 'text/xml'
|
||||||
attachment.fp.seek(0)
|
attachment.fp.seek(0)
|
||||||
assert attachment.fp.read(5) == '<?xml'
|
assert attachment.fp.read(5) == b'<?xml'
|
||||||
|
|
||||||
# no more 'bo1' backoffice field: do nothing
|
# no more 'bo1' backoffice field: do nothing
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
|
@ -2294,11 +2301,12 @@ def test_display_form(two_pubs):
|
||||||
st1.items.append(display_form)
|
st1.items.append(display_form)
|
||||||
display_form.parent = st1
|
display_form.parent = st1
|
||||||
|
|
||||||
form = Form(action='#')
|
form = Form(action='#', use_tokens=False)
|
||||||
display_form.fill_form(form, formdata, None)
|
display_form.fill_form(form, formdata, None)
|
||||||
assert form.widgets[0].title == 'Test'
|
assert form.widgets[0].title == 'Test'
|
||||||
assert form.widgets[1].title == 'Date'
|
assert form.widgets[1].title == 'Date'
|
||||||
|
|
||||||
|
two_pubs.get_request().environ['REQUEST_METHOD'] = 'POST'
|
||||||
two_pubs.get_request().form = {'f1': 'Foobar', 'f2': '2015-05-12', 'submit': 'submit'}
|
two_pubs.get_request().form = {'f1': 'Foobar', 'f2': '2015-05-12', 'submit': 'submit'}
|
||||||
display_form.submit_form(form, formdata, None, None)
|
display_form.submit_form(form, formdata, None, None)
|
||||||
|
|
||||||
|
@ -2309,8 +2317,9 @@ def test_display_form(two_pubs):
|
||||||
formdata.just_created()
|
formdata.just_created()
|
||||||
formdata.store()
|
formdata.store()
|
||||||
|
|
||||||
form = Form(action='#')
|
form = Form(action='#', use_tokens=False)
|
||||||
display_form.fill_form(form, formdata, None)
|
display_form.fill_form(form, formdata, None)
|
||||||
|
two_pubs.get_request().environ['REQUEST_METHOD'] = 'POST'
|
||||||
two_pubs.get_request().form = {'f1': 'Foobar', 'f2': '12/05/2015', 'submit': 'submit'}
|
two_pubs.get_request().form = {'f1': 'Foobar', 'f2': '12/05/2015', 'submit': 'submit'}
|
||||||
display_form.submit_form(form, formdata, None, None)
|
display_form.submit_form(form, formdata, None, None)
|
||||||
assert formdata.get_substitution_variables()['xxx_var_date'] == '12/05/2015'
|
assert formdata.get_substitution_variables()['xxx_var_date'] == '12/05/2015'
|
||||||
|
@ -2517,7 +2526,7 @@ def test_workflow_display_message_to(pub):
|
||||||
assert display_message.get_message(formdata) == ''
|
assert display_message.get_message(formdata) == ''
|
||||||
assert formdata.get_workflow_messages() == []
|
assert formdata.get_workflow_messages() == []
|
||||||
|
|
||||||
pub._request.user = user
|
pub._request._user = user
|
||||||
display_message.message = 'to-role'
|
display_message.message = 'to-role'
|
||||||
display_message.to = [role.id]
|
display_message.to = [role.id]
|
||||||
assert display_message.get_message(formdata) == ''
|
assert display_message.get_message(formdata) == ''
|
||||||
|
@ -2710,7 +2719,7 @@ def test_geolocate_address(pub):
|
||||||
json.dumps([{'lat':'48.8337085','lon':'2.3233693'}]), None)
|
json.dumps([{'lat':'48.8337085','lon':'2.3233693'}]), None)
|
||||||
item.perform(formdata)
|
item.perform(formdata)
|
||||||
assert 'https://nominatim.entrouvert.org/search' in http_get_page.call_args[0][0]
|
assert 'https://nominatim.entrouvert.org/search' in http_get_page.call_args[0][0]
|
||||||
assert urllib2.quote('169 rue du chateau, paris') in http_get_page.call_args[0][0]
|
assert urlparse.quote('169 rue du chateau, paris') in http_get_page.call_args[0][0]
|
||||||
assert int(formdata.geolocations['base']['lat']) == 48
|
assert int(formdata.geolocations['base']['lat']) == 48
|
||||||
assert int(formdata.geolocations['base']['lon']) == 2
|
assert int(formdata.geolocations['base']['lon']) == 2
|
||||||
|
|
||||||
|
@ -2721,7 +2730,7 @@ def test_geolocate_address(pub):
|
||||||
json.dumps([{'lat':'48.8337085', 'lon':'2.3233693'}]), None)
|
json.dumps([{'lat':'48.8337085', 'lon':'2.3233693'}]), None)
|
||||||
item.perform(formdata)
|
item.perform(formdata)
|
||||||
assert 'https://nominatim.entrouvert.org/search' in http_get_page.call_args[0][0]
|
assert 'https://nominatim.entrouvert.org/search' in http_get_page.call_args[0][0]
|
||||||
assert urllib2.quote('169 rue du chateau, paris') in http_get_page.call_args[0][0]
|
assert urlparse.quote('169 rue du chateau, paris') in http_get_page.call_args[0][0]
|
||||||
assert 'key=KEY' in http_get_page.call_args[0][0]
|
assert 'key=KEY' in http_get_page.call_args[0][0]
|
||||||
assert int(formdata.geolocations['base']['lat']) == 48
|
assert int(formdata.geolocations['base']['lat']) == 48
|
||||||
assert int(formdata.geolocations['base']['lon']) == 2
|
assert int(formdata.geolocations['base']['lon']) == 2
|
||||||
|
@ -2792,7 +2801,7 @@ def test_geolocate_image(pub):
|
||||||
formdef.store()
|
formdef.store()
|
||||||
|
|
||||||
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
||||||
upload.receive([open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg')).read()])
|
upload.receive([open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb').read()])
|
||||||
|
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
formdata.data = {'3': upload}
|
formdata.data = {'3': upload}
|
||||||
|
@ -2822,7 +2831,7 @@ def test_geolocate_image(pub):
|
||||||
|
|
||||||
# invalid photo
|
# invalid photo
|
||||||
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
||||||
upload.receive([open(os.path.join(os.path.dirname(__file__), 'template.odt')).read()])
|
upload.receive([open(os.path.join(os.path.dirname(__file__), 'template.odt'), 'rb').read()])
|
||||||
formdata.data = {'3': upload}
|
formdata.data = {'3': upload}
|
||||||
formdata.geolocations = None
|
formdata.geolocations = None
|
||||||
item.perform(formdata)
|
item.perform(formdata)
|
||||||
|
@ -2893,10 +2902,10 @@ def test_geolocate_overwrite(pub):
|
||||||
|
|
||||||
@pytest.mark.skipif(transform_to_pdf is None, reason='libreoffice not found')
|
@pytest.mark.skipif(transform_to_pdf is None, reason='libreoffice not found')
|
||||||
def test_transform_to_pdf():
|
def test_transform_to_pdf():
|
||||||
instream = open(os.path.join(os.path.dirname(__file__), 'template.odt'))
|
instream = open(os.path.join(os.path.dirname(__file__), 'template.odt'), 'rb')
|
||||||
outstream = transform_to_pdf(instream)
|
outstream = transform_to_pdf(instream)
|
||||||
assert outstream is not False
|
assert outstream is not False
|
||||||
assert outstream.read(10).startswith('%PDF-')
|
assert outstream.read(10).startswith(b'%PDF-')
|
||||||
|
|
||||||
def test_export_to_model_image(pub):
|
def test_export_to_model_image(pub):
|
||||||
formdef = FormDef()
|
formdef = FormDef()
|
||||||
|
@ -2907,7 +2916,7 @@ def test_export_to_model_image(pub):
|
||||||
formdef.store()
|
formdef.store()
|
||||||
|
|
||||||
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
||||||
image_data = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg')).read()
|
image_data = open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb').read()
|
||||||
upload.receive([image_data])
|
upload.receive([image_data])
|
||||||
|
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
|
@ -2920,9 +2929,9 @@ def test_export_to_model_image(pub):
|
||||||
item.convert_to_pdf = False
|
item.convert_to_pdf = False
|
||||||
item.method = 'non-interactive'
|
item.method = 'non-interactive'
|
||||||
template_filename = os.path.join(os.path.dirname(__file__), 'template-with-image.odt')
|
template_filename = os.path.join(os.path.dirname(__file__), 'template-with-image.odt')
|
||||||
template = open(template_filename).read()
|
template = open(template_filename, 'rb').read()
|
||||||
upload = QuixoteUpload('/foo/template.odt', content_type='application/octet-stream')
|
upload = QuixoteUpload('/foo/template.odt', content_type='application/octet-stream')
|
||||||
upload.fp = StringIO()
|
upload.fp = BytesIO()
|
||||||
upload.fp.write(template)
|
upload.fp.write(template)
|
||||||
upload.fp.seek(0)
|
upload.fp.seek(0)
|
||||||
item.model_file = UploadedFile(pub.app_dir, None, upload)
|
item.model_file = UploadedFile(pub.app_dir, None, upload)
|
||||||
|
@ -2981,9 +2990,9 @@ def test_export_to_model_backoffice_field(pub):
|
||||||
item.method = 'non-interactive'
|
item.method = 'non-interactive'
|
||||||
item.convert_to_pdf = False
|
item.convert_to_pdf = False
|
||||||
template_filename = os.path.join(os.path.dirname(__file__), 'template.odt')
|
template_filename = os.path.join(os.path.dirname(__file__), 'template.odt')
|
||||||
template = open(template_filename).read()
|
template = open(template_filename, 'rb').read()
|
||||||
upload = QuixoteUpload('/foo/template.odt', content_type='application/octet-stream')
|
upload = QuixoteUpload('/foo/template.odt', content_type='application/octet-stream')
|
||||||
upload.fp = StringIO()
|
upload.fp = BytesIO()
|
||||||
upload.fp.write(template)
|
upload.fp.write(template)
|
||||||
upload.fp.seek(0)
|
upload.fp.seek(0)
|
||||||
item.model_file = UploadedFile(pub.app_dir, None, upload)
|
item.model_file = UploadedFile(pub.app_dir, None, upload)
|
||||||
|
@ -2996,7 +3005,7 @@ def test_export_to_model_backoffice_field(pub):
|
||||||
assert fbo1.base_filename == 'template.odt'
|
assert fbo1.base_filename == 'template.odt'
|
||||||
assert fbo1.content_type == 'application/octet-stream'
|
assert fbo1.content_type == 'application/octet-stream'
|
||||||
zfile = zipfile.ZipFile(fbo1.get_file())
|
zfile = zipfile.ZipFile(fbo1.get_file())
|
||||||
assert 'foo-export-to-bofile' in zfile.read('content.xml')
|
assert b'foo-export-to-bofile' in zfile.read('content.xml')
|
||||||
|
|
||||||
# no more 'bo1' backoffice field: do nothing
|
# no more 'bo1' backoffice field: do nothing
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
|
@ -3038,31 +3047,31 @@ def test_export_to_model_django_template(pub):
|
||||||
item.method = 'non-interactive'
|
item.method = 'non-interactive'
|
||||||
item.attach_to_history = True
|
item.attach_to_history = True
|
||||||
template_filename = os.path.join(os.path.dirname(__file__), 'template-django.odt')
|
template_filename = os.path.join(os.path.dirname(__file__), 'template-django.odt')
|
||||||
template = open(template_filename).read()
|
template = open(template_filename, 'rb').read()
|
||||||
upload = QuixoteUpload('/foo/template-django.odt', content_type='application/octet-stream')
|
upload = QuixoteUpload('/foo/template-django.odt', content_type='application/octet-stream')
|
||||||
upload.fp = StringIO()
|
upload.fp = BytesIO()
|
||||||
upload.fp.write(template)
|
upload.fp.write(template)
|
||||||
upload.fp.seek(0)
|
upload.fp.seek(0)
|
||||||
item.model_file = UploadedFile(pub.app_dir, None, upload)
|
item.model_file = UploadedFile(pub.app_dir, None, upload)
|
||||||
item.convert_to_pdf = False
|
item.convert_to_pdf = False
|
||||||
item.perform(formdata)
|
item.perform(formdata)
|
||||||
|
|
||||||
new_content = zipfile.ZipFile(open(formdata.evolution[0].parts[0].filename)).read('content.xml')
|
new_content = zipfile.ZipFile(open(formdata.evolution[0].parts[0].filename, 'rb')).read('content.xml')
|
||||||
assert '>foo-export-to-template-with-django<' in new_content
|
assert b'>foo-export-to-template-with-django<' in new_content
|
||||||
|
|
||||||
formdef.name = 'Name with a \' simple quote'
|
formdef.name = 'Name with a \' simple quote'
|
||||||
formdef.store()
|
formdef.store()
|
||||||
item.perform(formdata)
|
item.perform(formdata)
|
||||||
|
|
||||||
new_content = zipfile.ZipFile(open(formdata.evolution[0].parts[1].filename)).read('content.xml')
|
new_content = zipfile.ZipFile(open(formdata.evolution[0].parts[1].filename, 'rb')).read('content.xml')
|
||||||
assert '>Name with a \' simple quote<' in new_content
|
assert b'>Name with a \' simple quote<' in new_content
|
||||||
|
|
||||||
formdef.name = 'A <> name'
|
formdef.name = 'A <> name'
|
||||||
formdef.store()
|
formdef.store()
|
||||||
item.perform(formdata)
|
item.perform(formdata)
|
||||||
|
|
||||||
new_content = zipfile.ZipFile(open(formdata.evolution[0].parts[2].filename)).read('content.xml')
|
new_content = zipfile.ZipFile(open(formdata.evolution[0].parts[2].filename, 'rb')).read('content.xml')
|
||||||
assert '>A <> name<' in new_content
|
assert b'>A <> name<' in new_content
|
||||||
|
|
||||||
def test_global_timeouts(two_pubs):
|
def test_global_timeouts(two_pubs):
|
||||||
pub = two_pubs
|
pub = two_pubs
|
||||||
|
@ -3515,7 +3524,7 @@ def test_set_backoffice_field_file(http_requests, two_pubs):
|
||||||
|
|
||||||
# store a PiclableUpload
|
# store a PiclableUpload
|
||||||
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
upload = PicklableUpload('test.jpeg', 'image/jpeg')
|
||||||
upload.receive([open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg')).read()])
|
upload.receive([open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb').read()])
|
||||||
|
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
formdata.data = {'00': upload}
|
formdata.data = {'00': upload}
|
||||||
|
@ -3528,13 +3537,12 @@ def test_set_backoffice_field_file(http_requests, two_pubs):
|
||||||
formdata = formdef.data_class().get(formdata.id)
|
formdata = formdef.data_class().get(formdata.id)
|
||||||
assert formdata.data['bo1'].base_filename == 'test.jpeg'
|
assert formdata.data['bo1'].base_filename == 'test.jpeg'
|
||||||
assert formdata.data['bo1'].content_type == 'image/jpeg'
|
assert formdata.data['bo1'].content_type == 'image/jpeg'
|
||||||
assert formdata.data['bo1'].get_content() == open(os.path.join(os.path.dirname(__file__),
|
assert formdata.data['bo1'].get_content() == open(os.path.join(os.path.dirname(__file__), 'image-with-gps-data.jpeg'), 'rb').read()
|
||||||
'image-with-gps-data.jpeg')).read()
|
|
||||||
|
|
||||||
# same test with PicklableUpload wcs.qommon.form
|
# same test with PicklableUpload wcs.qommon.form
|
||||||
from wcs.qommon.form import PicklableUpload as PicklableUpload2
|
from wcs.qommon.form import PicklableUpload as PicklableUpload2
|
||||||
upload2 = PicklableUpload2('test2.odt', 'application/vnd.oasis.opendocument.text')
|
upload2 = PicklableUpload2('test2.odt', 'application/vnd.oasis.opendocument.text')
|
||||||
upload2.receive([open(os.path.join(os.path.dirname(__file__), 'template.odt')).read()])
|
upload2.receive([open(os.path.join(os.path.dirname(__file__), 'template.odt'), 'rb').read()])
|
||||||
formdata = formdef.data_class()()
|
formdata = formdef.data_class()()
|
||||||
formdata.data = {'00': upload2}
|
formdata.data = {'00': upload2}
|
||||||
formdata.just_created()
|
formdata.just_created()
|
||||||
|
@ -3544,8 +3552,7 @@ def test_set_backoffice_field_file(http_requests, two_pubs):
|
||||||
formdata = formdef.data_class().get(formdata.id)
|
formdata = formdef.data_class().get(formdata.id)
|
||||||
assert formdata.data['bo1'].base_filename == 'test2.odt'
|
assert formdata.data['bo1'].base_filename == 'test2.odt'
|
||||||
assert formdata.data['bo1'].content_type == 'application/vnd.oasis.opendocument.text'
|
assert formdata.data['bo1'].content_type == 'application/vnd.oasis.opendocument.text'
|
||||||
assert formdata.data['bo1'].get_content() == open(os.path.join(os.path.dirname(__file__),
|
assert formdata.data['bo1'].get_content() == open(os.path.join(os.path.dirname(__file__), 'template.odt'), 'rb').read()
|
||||||
'template.odt')).read()
|
|
||||||
|
|
||||||
# check storing response as attachment
|
# check storing response as attachment
|
||||||
two_pubs.substitutions.feed(formdata)
|
two_pubs.substitutions.feed(formdata)
|
||||||
|
@ -3579,7 +3586,7 @@ def test_set_backoffice_field_file(http_requests, two_pubs):
|
||||||
|
|
||||||
formdata = formdef.data_class().get(formdata.id)
|
formdata = formdef.data_class().get(formdata.id)
|
||||||
assert formdata.data['bo1'].base_filename == 'hello.txt'
|
assert formdata.data['bo1'].base_filename == 'hello.txt'
|
||||||
assert formdata.data['bo1'].get_content() == 'hello world'
|
assert formdata.data['bo1'].get_content() == b'hello world'
|
||||||
|
|
||||||
item = SetBackofficeFieldsWorkflowStatusItem()
|
item = SetBackofficeFieldsWorkflowStatusItem()
|
||||||
item.parent = st1
|
item.parent = st1
|
||||||
|
@ -3589,7 +3596,7 @@ def test_set_backoffice_field_file(http_requests, two_pubs):
|
||||||
|
|
||||||
formdata = formdef.data_class().get(formdata.id)
|
formdata = formdef.data_class().get(formdata.id)
|
||||||
assert formdata.data['bo1'].base_filename == 'hello.txt'
|
assert formdata.data['bo1'].base_filename == 'hello.txt'
|
||||||
assert formdata.data['bo1'].get_content() == 'HELLO WORLD'
|
assert formdata.data['bo1'].get_content() == b'HELLO WORLD'
|
||||||
|
|
||||||
hello_world = formdata.data['bo1']
|
hello_world = formdata.data['bo1']
|
||||||
# check wrong value, or None (no file)
|
# check wrong value, or None (no file)
|
||||||
|
@ -3607,7 +3614,7 @@ def test_set_backoffice_field_file(http_requests, two_pubs):
|
||||||
formdata = formdef.data_class().get(formdata.id)
|
formdata = formdef.data_class().get(formdata.id)
|
||||||
if value is not None: # wrong value : do nothing
|
if value is not None: # wrong value : do nothing
|
||||||
assert formdata.data['bo1'].base_filename == 'hello.txt'
|
assert formdata.data['bo1'].base_filename == 'hello.txt'
|
||||||
assert formdata.data['bo1'].get_content() == 'HELLO WORLD'
|
assert formdata.data['bo1'].get_content() == b'HELLO WORLD'
|
||||||
assert LoggedError.count() == 1
|
assert LoggedError.count() == 1
|
||||||
logged_error = LoggedError.select()[0]
|
logged_error = LoggedError.select()[0]
|
||||||
assert logged_error.summary.startswith('Failed to convert')
|
assert logged_error.summary.startswith('Failed to convert')
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
import json
|
import json
|
||||||
import pytest
|
import pytest
|
||||||
from StringIO import StringIO
|
|
||||||
|
|
||||||
from wcs.qommon.http_request import HTTPRequest
|
from wcs.qommon.http_request import HTTPRequest
|
||||||
from wcs.qommon.template import Template
|
from wcs.qommon.template import Template
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
import cPickle
|
|
||||||
import email.header
|
import email.header
|
||||||
import email.parser
|
import email.parser
|
||||||
import os
|
import os
|
||||||
|
@ -9,14 +8,16 @@ import pytest
|
||||||
import shutil
|
import shutil
|
||||||
import sys
|
import sys
|
||||||
import threading
|
import threading
|
||||||
import urlparse
|
|
||||||
|
|
||||||
from wcs import sql, sessions
|
from wcs import sql, sessions
|
||||||
|
|
||||||
from webtest import TestApp
|
from webtest import TestApp
|
||||||
from quixote import cleanup, get_publisher
|
from quixote import cleanup, get_publisher
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.utils.encoding import force_bytes, force_text
|
||||||
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
|
|
||||||
|
from wcs.qommon import force_str
|
||||||
import wcs
|
import wcs
|
||||||
import wcs.wsgi
|
import wcs.wsgi
|
||||||
from wcs import publisher, compat
|
from wcs import publisher, compat
|
||||||
|
@ -101,7 +102,7 @@ def create_temporary_pub(sql_mode=False, templates_mode=False, lazy_mode=False):
|
||||||
created = True
|
created = True
|
||||||
|
|
||||||
# always reset site-options.cfg
|
# always reset site-options.cfg
|
||||||
fd = file(os.path.join(pub.app_dir, 'site-options.cfg'), 'w')
|
fd = open(os.path.join(pub.app_dir, 'site-options.cfg'), 'w')
|
||||||
fd.write('[wscall-secrets]\n')
|
fd.write('[wscall-secrets]\n')
|
||||||
fd.write('idp.example.net = BAR\n')
|
fd.write('idp.example.net = BAR\n')
|
||||||
fd.write('\n')
|
fd.write('\n')
|
||||||
|
@ -238,14 +239,14 @@ class EmailsMocking(object):
|
||||||
else:
|
else:
|
||||||
payload = msg.get_payload(decode=True)
|
payload = msg.get_payload(decode=True)
|
||||||
payloads = [payload]
|
payloads = [payload]
|
||||||
self.emails[subject] = {
|
self.emails[force_text(subject)] = {
|
||||||
'from': msg_from,
|
'from': msg_from,
|
||||||
'to': email.header.decode_header(msg['To'])[0][0],
|
'to': email.header.decode_header(msg['To'])[0][0],
|
||||||
'payload': payload,
|
'payload': force_str(payload if payload else ''),
|
||||||
'payloads': payloads,
|
'payloads': payloads,
|
||||||
'msg': msg,
|
'msg': msg,
|
||||||
}
|
}
|
||||||
self.emails[subject]['email_rcpt'] = rcpts
|
self.emails[force_text(subject)]['email_rcpt'] = rcpts
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
pass
|
pass
|
||||||
|
@ -337,6 +338,8 @@ class HttpRequestsMocking(object):
|
||||||
except IOError:
|
except IOError:
|
||||||
status = 404
|
status = 404
|
||||||
|
|
||||||
|
data = force_bytes(data)
|
||||||
|
|
||||||
class FakeResponse(object):
|
class FakeResponse(object):
|
||||||
def __init__(self, status, data, headers):
|
def __init__(self, status, data, headers):
|
||||||
self.status_code = status
|
self.status_code = status
|
||||||
|
|
15
tox.ini
15
tox.ini
|
@ -1,6 +1,6 @@
|
||||||
[tox]
|
[tox]
|
||||||
toxworkdir = {env:TMPDIR:/tmp}/tox-{env:USER}/wcs/{env:BRANCH_NAME:}
|
toxworkdir = {env:TMPDIR:/tmp}/tox-{env:USER}/wcs/{env:BRANCH_NAME:}
|
||||||
envlist = py2-pylint-coverage
|
envlist = py3-pylint-coverage,py2
|
||||||
|
|
||||||
[testenv]
|
[testenv]
|
||||||
sitepackages = true
|
sitepackages = true
|
||||||
|
@ -16,7 +16,8 @@ setenv =
|
||||||
passenv =
|
passenv =
|
||||||
USER
|
USER
|
||||||
deps =
|
deps =
|
||||||
pytest
|
py2: pytest
|
||||||
|
py3: pytest>=3.6,<5
|
||||||
pytest-mock
|
pytest-mock
|
||||||
pytest-cov
|
pytest-cov
|
||||||
pytest-django
|
pytest-django
|
||||||
|
@ -24,11 +25,19 @@ deps =
|
||||||
mechanize
|
mechanize
|
||||||
gadjo
|
gadjo
|
||||||
pyquery
|
pyquery
|
||||||
|
mock
|
||||||
|
requests
|
||||||
|
vobject
|
||||||
|
qrcode
|
||||||
|
Pillow
|
||||||
|
python-magic
|
||||||
|
docutils
|
||||||
git+https://git.entrouvert.org/debian/django-ckeditor.git
|
git+https://git.entrouvert.org/debian/django-ckeditor.git
|
||||||
django >=1.11, <1.12
|
django >=1.11, <1.12
|
||||||
django-ratelimit<3
|
django-ratelimit<3
|
||||||
pyproj
|
pyproj
|
||||||
pylint<1.8
|
pylint<1.8
|
||||||
|
py3: Quixote>=3.0
|
||||||
commands =
|
commands =
|
||||||
py.test {env:COVERAGE:} {posargs:tests/}
|
py.test -v {env:COVERAGE:} {posargs:tests/}
|
||||||
pylint: ./pylint.sh wcs/
|
pylint: ./pylint.sh wcs/
|
||||||
|
|
|
@ -1,182 +0,0 @@
|
||||||
# w.c.s. - web application for online forms
|
|
||||||
# Copyright (C) 2005-2010 Entr'ouvert
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation; either version 2 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
import time
|
|
||||||
import email.Parser
|
|
||||||
|
|
||||||
from quixote import get_response, redirect
|
|
||||||
from quixote.directory import Directory
|
|
||||||
from quixote.html import htmltext, TemplateIO
|
|
||||||
|
|
||||||
from ..qommon import _
|
|
||||||
from ..qommon import errors
|
|
||||||
from ..qommon import misc
|
|
||||||
from ..qommon.bounces import Bounce
|
|
||||||
from ..qommon.backoffice.menu import html_top
|
|
||||||
from ..qommon.admin.menu import command_icon
|
|
||||||
|
|
||||||
from ..qommon.form import *
|
|
||||||
from ..qommon.misc import get_cfg
|
|
||||||
|
|
||||||
def get_email_type_label(type):
|
|
||||||
from .settings import EmailsDirectory
|
|
||||||
return EmailsDirectory.emails_dict.get(type, {}).get('description')
|
|
||||||
|
|
||||||
class BouncePage(Directory):
|
|
||||||
_q_exports = ['', 'delete']
|
|
||||||
|
|
||||||
def __init__(self, component):
|
|
||||||
self.bounce = Bounce.get(component)
|
|
||||||
get_response().breadcrumb.append((component + '/', _('bounce')))
|
|
||||||
|
|
||||||
|
|
||||||
def _q_index(self):
|
|
||||||
html_top('bounces', title = _('Bounce'))
|
|
||||||
r = TemplateIO(html=True)
|
|
||||||
|
|
||||||
r += htmltext('<div class="form">')
|
|
||||||
|
|
||||||
if self.bounce.email_type:
|
|
||||||
r += htmltext('<div class="title">%s</div>') % _('Email Type')
|
|
||||||
r += htmltext('<div class="StringWidget content">')
|
|
||||||
r += _(get_email_type_label(self.bounce.email_type))
|
|
||||||
r += htmltext('</div>')
|
|
||||||
|
|
||||||
r += htmltext('<div class="title">%s</div>') % _('Arrival Time')
|
|
||||||
r += htmltext('<div class="StringWidget content">')
|
|
||||||
r += misc.localstrftime(time.localtime(self.bounce.arrival_time))
|
|
||||||
r += htmltext('</div>')
|
|
||||||
|
|
||||||
if self.bounce.addrs:
|
|
||||||
r += htmltext('<div class="title">%s</div>') % _('Failed Addresses')
|
|
||||||
r += htmltext('<div class="StringWidget content">')
|
|
||||||
r += ', '.join(self.bounce.addrs)
|
|
||||||
r += htmltext('</div>')
|
|
||||||
|
|
||||||
if self.bounce.original_rcpts:
|
|
||||||
r += htmltext('<div class="title">%s</div>') % _('Original Recipients')
|
|
||||||
r += htmltext('<div class="StringWidget content">')
|
|
||||||
r += ', '.join(self.bounce.original_rcpts)
|
|
||||||
r += htmltext('</div>')
|
|
||||||
|
|
||||||
r += htmltext('<div class="title">%s</div>') % _('Bounce Message')
|
|
||||||
r += htmltext('<div class="StringWidget content">')
|
|
||||||
r += htmltext('<pre style="max-height: 20em;">')
|
|
||||||
r += self.bounce.bounce_message
|
|
||||||
r += htmltext('</pre>')
|
|
||||||
r += htmltext('</div>')
|
|
||||||
|
|
||||||
if self.bounce.original_message:
|
|
||||||
parser = email.Parser.Parser()
|
|
||||||
|
|
||||||
msg = parser.parsestr(self.bounce.original_message)
|
|
||||||
if msg.is_multipart():
|
|
||||||
for m in msg.get_payload():
|
|
||||||
if m.get_content_type() == 'text/plain':
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
m = None
|
|
||||||
elif msg.get_content_type() == 'text/plain':
|
|
||||||
m = msg
|
|
||||||
else:
|
|
||||||
m = None
|
|
||||||
|
|
||||||
r += htmltext('<div class="title">%s</div>') % _('Original Message')
|
|
||||||
r += htmltext('<div class="StringWidget content">')
|
|
||||||
r += _('Subject: ')
|
|
||||||
subject, charset = email.Header.decode_header(msg['Subject'])[0]
|
|
||||||
if charset:
|
|
||||||
encoding = get_publisher().site_charset
|
|
||||||
r += unicode(subject, charset).encode(encoding)
|
|
||||||
else:
|
|
||||||
r += subject
|
|
||||||
r += htmltext('</div>')
|
|
||||||
|
|
||||||
if m:
|
|
||||||
r += htmltext('<div class="StringWidget content">')
|
|
||||||
r += htmltext('<pre>')
|
|
||||||
r += m.get_payload()
|
|
||||||
r += htmltext('</pre>')
|
|
||||||
r += htmltext('</div>')
|
|
||||||
|
|
||||||
r += htmltext('</div>') # form
|
|
||||||
return r.getvalue()
|
|
||||||
|
|
||||||
|
|
||||||
def delete(self):
|
|
||||||
form = Form(enctype='multipart/form-data')
|
|
||||||
form.widgets.append(HtmlWidget('<p>%s</p>' % _(
|
|
||||||
'You are about to irrevocably delete this bounce.')))
|
|
||||||
form.add_submit('delete', _('Delete'))
|
|
||||||
form.add_submit('cancel', _('Cancel'))
|
|
||||||
if form.get_widget('cancel').parse():
|
|
||||||
return redirect('..')
|
|
||||||
if not form.is_submitted() or form.has_errors():
|
|
||||||
get_response().breadcrumb.append(('delete', _('Delete')))
|
|
||||||
html_top('bounces', title = _('Delete Bounce'))
|
|
||||||
r = TemplateIO(html=True)
|
|
||||||
r += htmltext('<h2>%s</h2>') % _('Deleting Bounce')
|
|
||||||
r += form.render()
|
|
||||||
return r.getvalue()
|
|
||||||
else:
|
|
||||||
self.bounce.remove_self()
|
|
||||||
return redirect('..')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class BouncesDirectory(Directory):
|
|
||||||
_q_exports = ['']
|
|
||||||
|
|
||||||
def _q_traverse(self, path):
|
|
||||||
get_response().breadcrumb.append( ('bounces/', _('Bounces')) )
|
|
||||||
return Directory._q_traverse(self, path)
|
|
||||||
|
|
||||||
def is_visible(self, *args):
|
|
||||||
return bool(get_cfg('emails', {}).get('bounce_handler') is True)
|
|
||||||
|
|
||||||
def _q_index(self):
|
|
||||||
html_top('bounces', title = _('Bounces'))
|
|
||||||
|
|
||||||
bounces = Bounce.select(ignore_errors=True)
|
|
||||||
bounces.sort(lambda x,y: cmp(x.arrival_time, y.arrival_time))
|
|
||||||
|
|
||||||
r = TemplateIO(html=True)
|
|
||||||
r += htmltext('<ul class="biglist">')
|
|
||||||
for bounce in bounces:
|
|
||||||
r += htmltext('<li>')
|
|
||||||
r += htmltext('<strong class="label">')
|
|
||||||
r += misc.localstrftime(time.localtime(bounce.arrival_time))
|
|
||||||
if bounce.email_type:
|
|
||||||
r += ' - '
|
|
||||||
r += _(get_email_type_label(bounce.email_type))
|
|
||||||
r += htmltext('</strong>')
|
|
||||||
r += htmltext('<p class="details">')
|
|
||||||
if bounce.addrs:
|
|
||||||
r += ', '.join(bounce.addrs)
|
|
||||||
r += htmltext('</p>')
|
|
||||||
|
|
||||||
r += htmltext('<p class="commands">')
|
|
||||||
r += command_icon('%s/' % bounce.id, 'view')
|
|
||||||
r += command_icon('%s/delete' % bounce.id, 'remove', popup = True)
|
|
||||||
r += htmltext('</p></li>')
|
|
||||||
r += htmltext('</ul>')
|
|
||||||
return r.getvalue()
|
|
||||||
|
|
||||||
def _q_lookup(self, component):
|
|
||||||
try:
|
|
||||||
return BouncePage(component)
|
|
||||||
except KeyError:
|
|
||||||
raise errors.TraversalError()
|
|
|
@ -18,10 +18,10 @@ from quixote import redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
|
|
||||||
from ..qommon import _
|
from wcs.qommon import _
|
||||||
from wcs.categories import Category
|
from wcs.categories import Category
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
|
|
||||||
from wcs.formdef import FormDef
|
from wcs.formdef import FormDef
|
||||||
|
|
||||||
|
|
|
@ -18,11 +18,11 @@ from quixote import redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
|
|
||||||
from ..qommon import _
|
from wcs.qommon import _
|
||||||
from ..qommon import errors, template
|
from wcs.qommon import errors, template
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
from ..qommon.misc import json_response
|
from wcs.qommon.misc import json_response
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
from wcs.data_sources import (NamedDataSource, DataSourceSelectionWidget,
|
from wcs.data_sources import (NamedDataSource, DataSourceSelectionWidget,
|
||||||
get_structured_items)
|
get_structured_items)
|
||||||
from wcs.formdef import FormDef, get_formdefs_of_all_kinds
|
from wcs.formdef import FormDef, get_formdefs_of_all_kinds
|
||||||
|
|
|
@ -20,11 +20,11 @@ from quixote import redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext, htmlescape
|
from quixote.html import TemplateIO, htmltext, htmlescape
|
||||||
|
|
||||||
from ..qommon import _
|
from wcs.qommon import _
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
from ..qommon import errors, misc
|
from wcs.qommon import errors, misc
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
from ..qommon.admin.menu import command_icon
|
from wcs.qommon.admin.menu import command_icon
|
||||||
|
|
||||||
from wcs import fields
|
from wcs import fields
|
||||||
from wcs.formdef import FormDef
|
from wcs.formdef import FormDef
|
||||||
|
|
|
@ -22,21 +22,21 @@ import difflib
|
||||||
import tarfile
|
import tarfile
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from django.utils.six import StringIO
|
from django.utils.six import BytesIO, StringIO
|
||||||
|
|
||||||
from quixote import get_response, redirect
|
from quixote import get_response, redirect
|
||||||
from quixote.directory import Directory, AccessControlled
|
from quixote.directory import Directory, AccessControlled
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
|
|
||||||
from ..qommon import _
|
from wcs.qommon import _, force_str
|
||||||
from ..qommon import misc
|
from wcs.qommon import misc
|
||||||
from ..qommon.errors import *
|
from wcs.qommon.errors import *
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
from ..qommon import get_logger
|
from wcs.qommon import get_logger
|
||||||
from ..qommon.misc import C_
|
from wcs.qommon.misc import C_
|
||||||
|
|
||||||
from ..qommon.afterjobs import AfterJob
|
from wcs.qommon.afterjobs import AfterJob
|
||||||
|
|
||||||
from wcs.formdef import FormDef, FormdefImportError, FormdefImportRecoverableError
|
from wcs.formdef import FormDef, FormdefImportError, FormdefImportRecoverableError
|
||||||
from wcs.categories import Category
|
from wcs.categories import Category
|
||||||
|
@ -483,8 +483,8 @@ class FormDefPage(Directory):
|
||||||
if self.formdef.workflow.roles:
|
if self.formdef.workflow.roles:
|
||||||
if not self.formdef.workflow_roles:
|
if not self.formdef.workflow_roles:
|
||||||
self.formdef.workflow_roles = {}
|
self.formdef.workflow_roles = {}
|
||||||
workflow_roles = (self.formdef.workflow.roles or {}).items()
|
workflow_roles = list((self.formdef.workflow.roles or {}).items())
|
||||||
workflow_roles.sort(key=lambda x: -1 if x[0] == '_receiver' else misc.simplify(x[1]))
|
workflow_roles.sort(key=lambda x: '' if x[0] == '_receiver' else misc.simplify(x[1]))
|
||||||
for (wf_role_id, wf_role_label) in workflow_roles:
|
for (wf_role_id, wf_role_label) in workflow_roles:
|
||||||
role_id = self.formdef.workflow_roles.get(wf_role_id)
|
role_id = self.formdef.workflow_roles.get(wf_role_id)
|
||||||
if role_id:
|
if role_id:
|
||||||
|
@ -706,7 +706,7 @@ class FormDefPage(Directory):
|
||||||
form.add(CheckboxesWidget, 'required_authentication_contexts',
|
form.add(CheckboxesWidget, 'required_authentication_contexts',
|
||||||
title=_('Required authentication contexts'),
|
title=_('Required authentication contexts'),
|
||||||
value=self.formdef.required_authentication_contexts,
|
value=self.formdef.required_authentication_contexts,
|
||||||
options=auth_contexts.items())
|
options=list(auth_contexts.items()))
|
||||||
form.add_submit('submit', _('Submit'))
|
form.add_submit('submit', _('Submit'))
|
||||||
form.add_submit('cancel', _('Cancel'))
|
form.add_submit('cancel', _('Cancel'))
|
||||||
if form.get_widget('cancel').parse():
|
if form.get_widget('cancel').parse():
|
||||||
|
@ -1158,7 +1158,7 @@ class FormDefPage(Directory):
|
||||||
form.add(CheckboxWidget, 'force', title=_('Overwrite despite data loss'))
|
form.add(CheckboxWidget, 'force', title=_('Overwrite despite data loss'))
|
||||||
else:
|
else:
|
||||||
form.add_hidden('force', 'ok')
|
form.add_hidden('force', 'ok')
|
||||||
form.add_hidden('new_formdef', ET.tostring(new_formdef.export_to_xml(include_id=True)))
|
form.add_hidden('new_formdef', force_str(ET.tostring(new_formdef.export_to_xml(include_id=True))))
|
||||||
form.add_submit('submit', _('Submit'))
|
form.add_submit('submit', _('Submit'))
|
||||||
form.add_submit('cancel', _('Cancel'))
|
form.add_submit('cancel', _('Cancel'))
|
||||||
r += form.render()
|
r += form.render()
|
||||||
|
@ -1174,7 +1174,7 @@ class FormDefPage(Directory):
|
||||||
response.set_content_type('application/x-wcs-form')
|
response.set_content_type('application/x-wcs-form')
|
||||||
response.set_header('content-disposition',
|
response.set_header('content-disposition',
|
||||||
'attachment; filename=%s-%s.wcs' % (self.formdef_export_prefix, self.formdef.url_name))
|
'attachment; filename=%s-%s.wcs' % (self.formdef_export_prefix, self.formdef.url_name))
|
||||||
return '<?xml version="1.0" encoding="iso-8859-15"?>\n' + ET.tostring(x)
|
return '<?xml version="1.0"?>\n' + force_str(ET.tostring(x))
|
||||||
|
|
||||||
def archive(self):
|
def archive(self):
|
||||||
if get_publisher().is_using_postgresql():
|
if get_publisher().is_using_postgresql():
|
||||||
|
@ -1227,7 +1227,7 @@ class FormDefPage(Directory):
|
||||||
date = time.strptime(date, misc.date_format())
|
date = time.strptime(date, misc.date_format())
|
||||||
all_forms = [x for x in all_forms if x.last_update_time < date]
|
all_forms = [x for x in all_forms if x.last_update_time < date]
|
||||||
|
|
||||||
self.fd = StringIO()
|
self.fd = BytesIO()
|
||||||
t = tarfile.open('wcs.tar.gz', 'w:gz', fileobj=self.fd)
|
t = tarfile.open('wcs.tar.gz', 'w:gz', fileobj=self.fd)
|
||||||
t.add(self.formdef.get_object_filename(), 'formdef')
|
t.add(self.formdef.get_object_filename(), 'formdef')
|
||||||
for formdata in all_forms:
|
for formdata in all_forms:
|
||||||
|
|
|
@ -19,9 +19,9 @@ import re
|
||||||
from quixote import get_response, get_publisher, redirect
|
from quixote import get_response, get_publisher, redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
from ..qommon import _, ngettext
|
from wcs.qommon import _, ngettext
|
||||||
from ..qommon import errors, get_cfg
|
from wcs.qommon import errors, get_cfg
|
||||||
from ..qommon.misc import localstrftime
|
from wcs.qommon.misc import localstrftime
|
||||||
|
|
||||||
from wcs.logged_errors import LoggedError
|
from wcs.logged_errors import LoggedError
|
||||||
|
|
||||||
|
|
|
@ -18,11 +18,11 @@ from quixote import redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
|
|
||||||
from ..qommon import _, get_cfg
|
from wcs.qommon import _, get_cfg
|
||||||
from ..qommon import errors
|
from wcs.qommon import errors
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
|
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
|
|
||||||
from wcs.roles import Role, get_user_roles
|
from wcs.roles import Role, get_user_roles
|
||||||
from wcs.formdef import FormDef
|
from wcs.formdef import FormDef
|
||||||
|
|
|
@ -27,29 +27,30 @@ import shutil
|
||||||
|
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
from django.utils.six import StringIO
|
from django.utils.encoding import force_bytes
|
||||||
|
from django.utils.six import BytesIO, StringIO
|
||||||
|
|
||||||
from quixote import get_publisher, get_request, get_response, redirect
|
from quixote import get_publisher, get_request, get_response, redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
|
|
||||||
from ..qommon import _
|
from wcs.qommon import _
|
||||||
from ..qommon import get_cfg
|
from wcs.qommon import get_cfg
|
||||||
from ..qommon import errors
|
from wcs.qommon import errors
|
||||||
from ..qommon import misc
|
from wcs.qommon import misc
|
||||||
from ..qommon import template
|
from wcs.qommon import template
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
from ..qommon.sms import SMS
|
from wcs.qommon.sms import SMS
|
||||||
|
|
||||||
from ..qommon.afterjobs import AfterJob
|
from wcs.qommon.afterjobs import AfterJob
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
from ..qommon.admin.menu import error_page
|
from wcs.qommon.admin.menu import error_page
|
||||||
from ..qommon.admin.cfg import cfg_submit
|
from wcs.qommon.admin.cfg import cfg_submit
|
||||||
from ..qommon.admin.emails import EmailsDirectory
|
from wcs.qommon.admin.emails import EmailsDirectory
|
||||||
from wcs.qommon.admin.texts import TextsDirectory
|
from wcs.qommon.admin.texts import TextsDirectory
|
||||||
from ..qommon.admin.settings import SettingsDirectory as QommonSettingsDirectory
|
from wcs.qommon.admin.settings import SettingsDirectory as QommonSettingsDirectory
|
||||||
from ..qommon.admin.logger import LoggerDirectory
|
from wcs.qommon.admin.logger import LoggerDirectory
|
||||||
from ..qommon import ident
|
from wcs.qommon import ident
|
||||||
|
|
||||||
from wcs.formdef import FormDef
|
from wcs.formdef import FormDef
|
||||||
from wcs.carddef import CardDef
|
from wcs.carddef import CardDef
|
||||||
|
@ -377,12 +378,12 @@ class ThemePreviewDirectory(Directory):
|
||||||
|
|
||||||
if path[1] in ('backoffice', 'admin') or \
|
if path[1] in ('backoffice', 'admin') or \
|
||||||
get_request().get_method() == 'POST':
|
get_request().get_method() == 'POST':
|
||||||
from ..qommon.template import error_page as base_error_page
|
from wcs.qommon.template import error_page as base_error_page
|
||||||
output = base_error_page(_("The theme preview doesn't support this."))
|
output = base_error_page(_("The theme preview doesn't support this."))
|
||||||
else:
|
else:
|
||||||
output = root_directory._q_traverse(path[1:])
|
output = root_directory._q_traverse(path[1:])
|
||||||
|
|
||||||
from ..qommon.template import decorate
|
from wcs.qommon.template import decorate
|
||||||
if isinstance(output, template.QommonTemplateResponse):
|
if isinstance(output, template.QommonTemplateResponse):
|
||||||
output = template.render(output.templates, output.context)
|
output = template.render(output.templates, output.context)
|
||||||
theme_preview = decorate(output, response)
|
theme_preview = decorate(output, response)
|
||||||
|
@ -554,7 +555,6 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
('users', N_('Users')),
|
('users', N_('Users')),
|
||||||
('roles', N_('Roles')),
|
('roles', N_('Roles')),
|
||||||
('categories', N_('Categories')),
|
('categories', N_('Categories')),
|
||||||
('bounces', N_('Bounces')),
|
|
||||||
('settings', N_('Settings')),
|
('settings', N_('Settings')),
|
||||||
]
|
]
|
||||||
for k, v in admin_sections:
|
for k, v in admin_sections:
|
||||||
|
@ -574,7 +574,7 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
for k in permission_keys:
|
for k in permission_keys:
|
||||||
authorised_roles = [str(x) for x in permissions_cfg.get(k) or []]
|
authorised_roles = [str(x) for x in permissions_cfg.get(k) or []]
|
||||||
value[-1].append(bool(str(role.id) in authorised_roles))
|
value[-1].append(bool(str(role.id) in authorised_roles))
|
||||||
colrows_hash = hashlib.md5('%r-%r' % (rows, permissions)).hexdigest()
|
colrows_hash = hashlib.md5(force_bytes('%r-%r' % (rows, permissions))).hexdigest()
|
||||||
|
|
||||||
form.add_hidden('hash', colrows_hash)
|
form.add_hidden('hash', colrows_hash)
|
||||||
form.add(CheckboxesTableWidget, 'permissions', rows=rows, columns=permissions)
|
form.add(CheckboxesTableWidget, 'permissions', rows=rows, columns=permissions)
|
||||||
|
@ -622,7 +622,7 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
def themes(self):
|
def themes(self):
|
||||||
request = get_request()
|
request = get_request()
|
||||||
|
|
||||||
if not request.form.has_key('theme'):
|
if 'theme' not in request.form:
|
||||||
current_theme = get_cfg('branding', {}).get('theme', 'default')
|
current_theme = get_cfg('branding', {}).get('theme', 'default')
|
||||||
|
|
||||||
get_response().breadcrumb.append(('themes', _('Themes')))
|
get_response().breadcrumb.append(('themes', _('Themes')))
|
||||||
|
@ -669,7 +669,7 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
return r.getvalue()
|
return r.getvalue()
|
||||||
else:
|
else:
|
||||||
themes = template.get_themes()
|
themes = template.get_themes()
|
||||||
if themes.has_key(str(request.form['theme'])):
|
if str(request.form['theme']) in themes:
|
||||||
branding_cfg = get_cfg('branding', {})
|
branding_cfg = get_cfg('branding', {})
|
||||||
branding_cfg[str('theme')] = str(request.form['theme'])
|
branding_cfg[str('theme')] = str(request.form['theme'])
|
||||||
get_publisher().cfg[str('branding')] = branding_cfg
|
get_publisher().cfg[str('branding')] = branding_cfg
|
||||||
|
@ -686,7 +686,7 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
return redirect('themes')
|
return redirect('themes')
|
||||||
|
|
||||||
parent_theme_directory = os.path.dirname(theme_directory)
|
parent_theme_directory = os.path.dirname(theme_directory)
|
||||||
c = StringIO()
|
c = BytesIO()
|
||||||
z = zipfile.ZipFile(c, 'w')
|
z = zipfile.ZipFile(c, 'w')
|
||||||
for base, dirnames, filenames in os.walk(theme_directory):
|
for base, dirnames, filenames in os.walk(theme_directory):
|
||||||
basetheme = base[len(parent_theme_directory)+1:]
|
basetheme = base[len(parent_theme_directory)+1:]
|
||||||
|
@ -750,7 +750,7 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
get_session().message = ('error', _('Theme is missing a desc.xml file.'))
|
get_session().message = ('error', _('Theme is missing a desc.xml file.'))
|
||||||
return redirect('themes')
|
return redirect('themes')
|
||||||
desc_xml = z.read('%s/desc.xml' % theme_name)
|
desc_xml = z.read('%s/desc.xml' % theme_name)
|
||||||
theme_dict = template.get_theme_dict(StringIO(desc_xml))
|
theme_dict = template.get_theme_dict(StringIO(force_text(desc_xml)))
|
||||||
if theme_dict.get('name') != theme_name:
|
if theme_dict.get('name') != theme_name:
|
||||||
get_session().message = ('error', _('desc.xml is missing a name attribute.'))
|
get_session().message = ('error', _('desc.xml is missing a name attribute.'))
|
||||||
return redirect('themes')
|
return redirect('themes')
|
||||||
|
@ -763,7 +763,7 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
data = z.read(f)
|
data = z.read(f)
|
||||||
if not os.path.exists(os.path.dirname(path)):
|
if not os.path.exists(os.path.dirname(path)):
|
||||||
os.makedirs(os.path.dirname(path))
|
os.makedirs(os.path.dirname(path))
|
||||||
open(path, 'w').write(data)
|
open(path, 'wb').write(data)
|
||||||
z.close()
|
z.close()
|
||||||
return redirect('themes')
|
return redirect('themes')
|
||||||
|
|
||||||
|
@ -819,7 +819,7 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
else:
|
else:
|
||||||
template = form.get_widget('template').parse()
|
template = form.get_widget('template').parse()
|
||||||
if template in (DEFAULT_TEMPLATE_EZT, theme_default_template_ezt) or not template:
|
if template in (DEFAULT_TEMPLATE_EZT, theme_default_template_ezt) or not template:
|
||||||
if branding_cfg.has_key('template'):
|
if 'template' in branding_cfg:
|
||||||
del branding_cfg['template']
|
del branding_cfg['template']
|
||||||
else:
|
else:
|
||||||
branding_cfg['template'] = template
|
branding_cfg['template'] = template
|
||||||
|
@ -883,7 +883,7 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
self.settings = settings
|
self.settings = settings
|
||||||
|
|
||||||
def export(self, job):
|
def export(self, job):
|
||||||
c = StringIO()
|
c = BytesIO()
|
||||||
z = zipfile.ZipFile(c, 'w')
|
z = zipfile.ZipFile(c, 'w')
|
||||||
for d in self.dirs:
|
for d in self.dirs:
|
||||||
if d not in ('roles', 'categories', 'datasources', 'wscalls'):
|
if d not in ('roles', 'categories', 'datasources', 'wscalls'):
|
||||||
|
@ -900,19 +900,19 @@ class SettingsDirectory(QommonSettingsDirectory):
|
||||||
node = formdef.export_to_xml(include_id=True)
|
node = formdef.export_to_xml(include_id=True)
|
||||||
misc.indent_xml(node)
|
misc.indent_xml(node)
|
||||||
z.writestr(os.path.join('formdefs_xml', str(formdef.id)),
|
z.writestr(os.path.join('formdefs_xml', str(formdef.id)),
|
||||||
'<?xml version="1.0" encoding="iso-8859-15"?>\n' + ET.tostring(node))
|
b'<?xml version="1.0"?>\n' + ET.tostring(node))
|
||||||
if 'carddefs' in self.dirs:
|
if 'carddefs' in self.dirs:
|
||||||
for formdef in CardDef.select():
|
for formdef in CardDef.select():
|
||||||
node = formdef.export_to_xml(include_id=True)
|
node = formdef.export_to_xml(include_id=True)
|
||||||
misc.indent_xml(node)
|
misc.indent_xml(node)
|
||||||
z.writestr(os.path.join('carddefs_xml', str(formdef.id)),
|
z.writestr(os.path.join('carddefs_xml', str(formdef.id)),
|
||||||
'<?xml version="1.0" encoding="iso-8859-15"?>\n' + ET.tostring(node))
|
b'<?xml version="1.0"?>\n' + ET.tostring(node))
|
||||||
if 'workflows' in self.dirs:
|
if 'workflows' in self.dirs:
|
||||||
for workflow in Workflow.select():
|
for workflow in Workflow.select():
|
||||||
node = workflow.export_to_xml(include_id=True)
|
node = workflow.export_to_xml(include_id=True)
|
||||||
misc.indent_xml(node)
|
misc.indent_xml(node)
|
||||||
z.writestr(os.path.join('workflows_xml', str(workflow.id)),
|
z.writestr(os.path.join('workflows_xml', str(workflow.id)),
|
||||||
'<?xml version="1.0" encoding="iso-8859-15"?>\n' + ET.tostring(node))
|
b'<?xml version="1.0"?>\n' + ET.tostring(node))
|
||||||
|
|
||||||
if self.settings:
|
if self.settings:
|
||||||
z.write(os.path.join(self.app_dir, 'config.pck'), 'config.pck')
|
z.write(os.path.join(self.app_dir, 'config.pck'), 'config.pck')
|
||||||
|
|
|
@ -14,23 +14,26 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_text
|
||||||
|
|
||||||
from quixote import get_publisher, get_response, get_request, get_session, redirect
|
from quixote import get_publisher, get_response, get_request, get_session, redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
|
|
||||||
from ..qommon import _
|
from wcs.qommon import _, force_str
|
||||||
import wcs.qommon.storage as st
|
import wcs.qommon.storage as st
|
||||||
from ..qommon import errors
|
from wcs.qommon import errors
|
||||||
from ..qommon import misc, get_cfg
|
from wcs.qommon import misc, get_cfg
|
||||||
from ..qommon.backoffice.listing import pagination_links
|
from wcs.qommon.backoffice.listing import pagination_links
|
||||||
from wcs.roles import Role
|
from wcs.roles import Role
|
||||||
|
|
||||||
from ..qommon import ident
|
from wcs.qommon import ident
|
||||||
from ..qommon.ident.idp import is_idp_managing_user_attributes, is_idp_managing_user_roles
|
from wcs.qommon.ident.idp import is_idp_managing_user_attributes, is_idp_managing_user_roles
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
from ..qommon.admin.emails import EmailsDirectory
|
from wcs.qommon.admin.emails import EmailsDirectory
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
from ..qommon.admin.menu import error_page
|
from wcs.qommon.admin.menu import error_page
|
||||||
|
|
||||||
class UserUI(object):
|
class UserUI(object):
|
||||||
def __init__(self, user):
|
def __init__(self, user):
|
||||||
|
@ -310,7 +313,7 @@ class UsersDirectory(Directory):
|
||||||
checked_roles = None
|
checked_roles = None
|
||||||
if get_request().form.get('filter'):
|
if get_request().form.get('filter'):
|
||||||
checked_roles = get_request().form.get('role', [])
|
checked_roles = get_request().form.get('role', [])
|
||||||
if type(checked_roles) in (str, unicode):
|
if isinstance(checked_roles, six.string_types):
|
||||||
checked_roles = [checked_roles]
|
checked_roles = [checked_roles]
|
||||||
|
|
||||||
if checked_roles:
|
if checked_roles:
|
||||||
|
@ -432,9 +435,7 @@ class UsersDirectory(Directory):
|
||||||
r += htmltext('<h3>%s</h3>') % _('Search')
|
r += htmltext('<h3>%s</h3>') % _('Search')
|
||||||
if get_request().form.get('q'):
|
if get_request().form.get('q'):
|
||||||
q = get_request().form.get('q')
|
q = get_request().form.get('q')
|
||||||
if type(q) is not unicode:
|
r += htmltext('<input name="q" value="%s">') % force_str(q)
|
||||||
q = unicode(q, get_publisher().site_charset)
|
|
||||||
r += htmltext('<input name="q" value="%s">') % q.encode(get_publisher().site_charset)
|
|
||||||
else:
|
else:
|
||||||
r += htmltext('<input name="q">')
|
r += htmltext('<input name="q">')
|
||||||
r += htmltext('<input type="submit" value="%s"/>') % _('Search')
|
r += htmltext('<input type="submit" value="%s"/>') % _('Search')
|
||||||
|
|
|
@ -16,24 +16,28 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import time
|
import time
|
||||||
from subprocess import Popen, PIPE
|
from subprocess import Popen, PIPE
|
||||||
import textwrap
|
import textwrap
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_bytes, force_text
|
||||||
from django.utils.six import StringIO
|
from django.utils.six import StringIO
|
||||||
|
|
||||||
from quixote import redirect, get_publisher
|
from quixote import redirect, get_publisher
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
|
|
||||||
from ..qommon import _
|
from wcs.qommon import _, force_str
|
||||||
from ..qommon import errors
|
from wcs.qommon import errors
|
||||||
from ..qommon import misc
|
from wcs.qommon import misc
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
from ..qommon.admin.menu import command_icon
|
from wcs.qommon.admin.menu import command_icon
|
||||||
from ..qommon import get_logger
|
from wcs.qommon import get_logger
|
||||||
|
|
||||||
from wcs.workflows import *
|
from wcs.workflows import *
|
||||||
from wcs.carddef import CardDef
|
from wcs.carddef import CardDef
|
||||||
|
@ -114,7 +118,7 @@ def graphviz_post_treatment(content, colours, include=False):
|
||||||
remove_tag(root, TITLE)
|
remove_tag(root, TITLE)
|
||||||
for child in root:
|
for child in root:
|
||||||
adjust_style(child, child, colours)
|
adjust_style(child, child, colours)
|
||||||
return ET.tostring(tree)
|
return force_text(ET.tostring(tree))
|
||||||
|
|
||||||
def graphviz(workflow, url_prefix='', select=None, svg=True,
|
def graphviz(workflow, url_prefix='', select=None, svg=True,
|
||||||
include=False):
|
include=False):
|
||||||
|
@ -142,23 +146,23 @@ def graphviz(workflow, url_prefix='', select=None, svg=True,
|
||||||
|
|
||||||
colours = {}
|
colours = {}
|
||||||
revert_colours = {}
|
revert_colours = {}
|
||||||
print >>out, 'digraph main {'
|
print('digraph main {', file=out)
|
||||||
# print >>out, 'graph [ rankdir=LR ];'
|
# print >>out, 'graph [ rankdir=LR ];'
|
||||||
print >>out, 'node [shape=box,style=filled];'
|
print('node [shape=box,style=filled];', file=out)
|
||||||
print >>out, 'edge [];'
|
print('edge [];', file=out)
|
||||||
for status in workflow.possible_status:
|
for status in workflow.possible_status:
|
||||||
i = status.id
|
i = status.id
|
||||||
print >>out, 'status%s' % i,
|
print('status%s' % i, end=' ', file=out)
|
||||||
print >>out, '[label="%s"' % status.name.replace('"', "'"),
|
print('[label="%s"' % status.name.replace('"', "'"), end= ' ', file=out)
|
||||||
if select == str(i):
|
if select == str(i):
|
||||||
print >>out, ',id=current_status'
|
print(',id=current_status', file=out)
|
||||||
if status.colour:
|
if status.colour:
|
||||||
if status.colour not in colours:
|
if status.colour not in colours:
|
||||||
colours[status.colour] = graphviz_colours.pop()
|
colours[status.colour] = graphviz_colours.pop()
|
||||||
revert_colours[colours[status.colour]] = status.colour
|
revert_colours[colours[status.colour]] = status.colour
|
||||||
print >>out, ',color=%s' % colours[status.colour]
|
print(',color=%s' % colours[status.colour], file=out)
|
||||||
print >>out, ',class=%s' % colours[status.colour]
|
print(',class=%s' % colours[status.colour], file=out)
|
||||||
print >>out, ' URL="%sstatus/%s/"];' % (url_prefix, i)
|
print(' URL="%sstatus/%s/"];' % (url_prefix, i), file=out)
|
||||||
|
|
||||||
for status in workflow.possible_status:
|
for status in workflow.possible_status:
|
||||||
i = status.id
|
i = status.id
|
||||||
|
@ -173,23 +177,25 @@ def graphviz(workflow, url_prefix='', select=None, svg=True,
|
||||||
# don't display multiple arrows for same action and target
|
# don't display multiple arrows for same action and target
|
||||||
# status
|
# status
|
||||||
continue
|
continue
|
||||||
print >>out, 'status%s -> status%s' % (i, next_id)
|
print('status%s -> status%s' % (i, next_id), file=out)
|
||||||
done[next_id] = True
|
done[next_id] = True
|
||||||
label = item.get_jump_label(target_id=next_id)
|
label = item.get_jump_label(target_id=next_id)
|
||||||
label = label.replace('"', '\\"')
|
label = label.replace('"', '\\"')
|
||||||
label = label.decode('utf8')
|
if six.PY2:
|
||||||
|
label = label.decode('utf8')
|
||||||
label = textwrap.fill(label, 20, break_long_words=False)
|
label = textwrap.fill(label, 20, break_long_words=False)
|
||||||
label = label.encode('utf8')
|
if six.PY2:
|
||||||
|
label = label.encode('utf8')
|
||||||
label = label.replace('\n', '\\n')
|
label = label.replace('\n', '\\n')
|
||||||
print >>out, '[label="%s"' % label,
|
print('[label="%s"' % label, end=' ', file=out)
|
||||||
print >>out, ',URL="%s%s"]' % (url_prefix, url)
|
print(',URL="%s%s"]' % (url_prefix, url), file=out)
|
||||||
|
|
||||||
print >>out, '}'
|
print('}', file=out)
|
||||||
out = out.getvalue()
|
out = out.getvalue()
|
||||||
if svg:
|
if svg:
|
||||||
try:
|
try:
|
||||||
process = Popen(['dot', '-Tsvg'], stdin=PIPE, stdout=PIPE)
|
process = Popen(['dot', '-Tsvg'], stdin=PIPE, stdout=PIPE)
|
||||||
out, err = process.communicate(out)
|
out, err = process.communicate(force_bytes(out))
|
||||||
if process.returncode != 0:
|
if process.returncode != 0:
|
||||||
return ''
|
return ''
|
||||||
except OSError:
|
except OSError:
|
||||||
|
@ -717,7 +723,7 @@ class WorkflowStatusPage(Directory):
|
||||||
if form.is_submitted() and not form.has_errors():
|
if form.is_submitted() and not form.has_errors():
|
||||||
hide_status = form.get_widget('hide_status_from_user').parse()
|
hide_status = form.get_widget('hide_status_from_user').parse()
|
||||||
if hide_status:
|
if hide_status:
|
||||||
self.status.visibility = self.workflow.roles.keys()
|
self.status.visibility = list(self.workflow.roles.keys())
|
||||||
else:
|
else:
|
||||||
self.status.visibility = None
|
self.status.visibility = None
|
||||||
self.status.colour = form.get_widget('colour').parse() or 'ffffff'
|
self.status.colour = form.get_widget('colour').parse() or 'ffffff'
|
||||||
|
@ -966,7 +972,7 @@ class FunctionsDirectory(Directory):
|
||||||
# restrictions if necessary
|
# restrictions if necessary
|
||||||
for status in self.workflow.possible_status:
|
for status in self.workflow.possible_status:
|
||||||
if status.get_visibility_restricted_roles():
|
if status.get_visibility_restricted_roles():
|
||||||
status.visibility = self.workflow.roles.keys()
|
status.visibility = list(self.workflow.roles.keys())
|
||||||
self.workflow.store()
|
self.workflow.store()
|
||||||
return redirect('..')
|
return redirect('..')
|
||||||
|
|
||||||
|
@ -1441,8 +1447,8 @@ class WorkflowPage(Directory):
|
||||||
r += htmltext(' <span class="change">(<a rel="popup" href="functions/new">%s</a>)</span>') % _('add function')
|
r += htmltext(' <span class="change">(<a rel="popup" href="functions/new">%s</a>)</span>') % _('add function')
|
||||||
r += htmltext('</h3>')
|
r += htmltext('</h3>')
|
||||||
r += htmltext('<ul id="roles-list" class="biglist">')
|
r += htmltext('<ul id="roles-list" class="biglist">')
|
||||||
workflow_roles = (self.workflow.roles or {}).items()
|
workflow_roles = list((self.workflow.roles or {}).items())
|
||||||
workflow_roles.sort(key=lambda x: -1 if x[0] == '_receiver' else misc.simplify(x[1]))
|
workflow_roles.sort(key=lambda x: '' if x[0] == '_receiver' else misc.simplify(x[1]))
|
||||||
for key, label in workflow_roles:
|
for key, label in workflow_roles:
|
||||||
r += htmltext('<li class="biglistitem">')
|
r += htmltext('<li class="biglistitem">')
|
||||||
if not str(self.workflow.id).startswith('_'):
|
if not str(self.workflow.id).startswith('_'):
|
||||||
|
@ -1669,7 +1675,7 @@ class WorkflowPage(Directory):
|
||||||
response.set_content_type('application/x-wcs-form')
|
response.set_content_type('application/x-wcs-form')
|
||||||
response.set_header('content-disposition',
|
response.set_header('content-disposition',
|
||||||
'attachment; filename=workflow-%s.wcs' % misc.simplify(self.workflow.name))
|
'attachment; filename=workflow-%s.wcs' % misc.simplify(self.workflow.name))
|
||||||
return '<?xml version="1.0" encoding="utf-8"?>\n' + ET.tostring(x)
|
return '<?xml version="1.0"?>\n' + force_str(ET.tostring(x))
|
||||||
|
|
||||||
def get_new_status_form(self):
|
def get_new_status_form(self):
|
||||||
r = TemplateIO(html=True)
|
r = TemplateIO(html=True)
|
||||||
|
|
|
@ -18,10 +18,10 @@ from quixote import redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext
|
from quixote.html import TemplateIO, htmltext
|
||||||
|
|
||||||
from ..qommon import _
|
from wcs.qommon import _
|
||||||
from ..qommon import errors
|
from wcs.qommon import errors
|
||||||
from ..qommon.form import *
|
from wcs.qommon.form import *
|
||||||
from ..qommon.backoffice.menu import html_top
|
from wcs.qommon.backoffice.menu import html_top
|
||||||
from wcs.wscalls import NamedWsCall, WsCallRequestWidget
|
from wcs.wscalls import NamedWsCall, WsCallRequestWidget
|
||||||
|
|
||||||
class NamedWsCallUI(object):
|
class NamedWsCallUI(object):
|
||||||
|
|
35
wcs/api.py
35
wcs/api.py
|
@ -22,6 +22,7 @@ import sys
|
||||||
from quixote import get_request, get_publisher, get_response, get_session
|
from quixote import get_request, get_publisher, get_response, get_session
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
|
|
||||||
|
from django.utils.encoding import force_text
|
||||||
from django.utils.six.moves.urllib import parse as urllib
|
from django.utils.six.moves.urllib import parse as urllib
|
||||||
from django.http import HttpResponse, HttpResponseBadRequest
|
from django.http import HttpResponse, HttpResponseBadRequest
|
||||||
|
|
||||||
|
@ -232,14 +233,12 @@ class ApiFormsDirectory(Directory):
|
||||||
raise TraversalError()
|
raise TraversalError()
|
||||||
|
|
||||||
self.check_access()
|
self.check_access()
|
||||||
get_request().user = get_user_from_api_query_string() or get_request().user
|
get_request()._user = get_user_from_api_query_string() or get_request().user
|
||||||
|
|
||||||
if FormDef.count() == 0:
|
if FormDef.count() == 0:
|
||||||
# early return, this avoids running a query against a missing SQL view.
|
# early return, this avoids running a query against a missing SQL view.
|
||||||
get_response().set_content_type('application/json')
|
get_response().set_content_type('application/json')
|
||||||
return json.dumps({'data': []},
|
return json.dumps({'data': []}, cls=misc.JSONEncoder)
|
||||||
cls=misc.JSONEncoder,
|
|
||||||
encoding=get_publisher().site_charset)
|
|
||||||
|
|
||||||
from wcs import sql
|
from wcs import sql
|
||||||
|
|
||||||
|
@ -289,16 +288,14 @@ class ApiFormsDirectory(Directory):
|
||||||
for x in formdatas]
|
for x in formdatas]
|
||||||
|
|
||||||
get_response().set_content_type('application/json')
|
get_response().set_content_type('application/json')
|
||||||
return json.dumps({'data': output},
|
return json.dumps({'data': output}, cls=misc.JSONEncoder)
|
||||||
cls=misc.JSONEncoder,
|
|
||||||
encoding=get_publisher().site_charset)
|
|
||||||
|
|
||||||
|
|
||||||
def geojson(self):
|
def geojson(self):
|
||||||
if not get_publisher().is_using_postgresql():
|
if not get_publisher().is_using_postgresql():
|
||||||
raise TraversalError()
|
raise TraversalError()
|
||||||
self.check_access()
|
self.check_access()
|
||||||
get_request().user = get_user_from_api_query_string() or get_request().user
|
get_request()._user = get_user_from_api_query_string() or get_request().user
|
||||||
return ManagementDirectory().geojson()
|
return ManagementDirectory().geojson()
|
||||||
|
|
||||||
def _q_lookup(self, component):
|
def _q_lookup(self, component):
|
||||||
|
@ -477,7 +474,7 @@ class ApiFormdefsDirectory(Directory):
|
||||||
# anonymous API call, mark authentication as required
|
# anonymous API call, mark authentication as required
|
||||||
authentication_required = True
|
authentication_required = True
|
||||||
|
|
||||||
formdict = {'title': unicode(formdef.name, charset),
|
formdict = {'title': force_text(formdef.name, charset),
|
||||||
'slug': formdef.url_name,
|
'slug': formdef.url_name,
|
||||||
'url': formdef.get_url(),
|
'url': formdef.get_url(),
|
||||||
'description': formdef.description or '',
|
'description': formdef.description or '',
|
||||||
|
@ -509,11 +506,11 @@ class ApiFormdefsDirectory(Directory):
|
||||||
formdict['functions'][wf_role_id] = workflow_function
|
formdict['functions'][wf_role_id] = workflow_function
|
||||||
|
|
||||||
if formdef.category:
|
if formdef.category:
|
||||||
formdict['category'] = unicode(formdef.category.name, charset)
|
formdict['category'] = force_text(formdef.category.name, charset)
|
||||||
formdict['category_slug'] = unicode(formdef.category.url_name, charset)
|
formdict['category_slug'] = force_text(formdef.category.url_name, charset)
|
||||||
formdict['category_position'] = (formdef.category.position or 0)
|
formdict['category_position'] = (formdef.category.position or 0)
|
||||||
else:
|
else:
|
||||||
formdict['category_position'] = sys.maxint
|
formdict['category_position'] = sys.maxsize
|
||||||
|
|
||||||
list_forms.append(formdict)
|
list_forms.append(formdict)
|
||||||
|
|
||||||
|
@ -536,7 +533,7 @@ class ApiFormdefsDirectory(Directory):
|
||||||
list_forms = self.get_list_forms(user, list_all_forms,
|
list_forms = self.get_list_forms(user, list_all_forms,
|
||||||
backoffice_submission=backoffice_submission)
|
backoffice_submission=backoffice_submission)
|
||||||
|
|
||||||
list_forms.sort(lambda x, y: cmp(x['category_position'], y['category_position']))
|
list_forms.sort(key=lambda x: x['category_position'])
|
||||||
for formdict in list_forms:
|
for formdict in list_forms:
|
||||||
del formdict['category_position']
|
del formdict['category_position']
|
||||||
|
|
||||||
|
@ -581,11 +578,11 @@ class ApiCategoriesDirectory(Directory):
|
||||||
all_formdefs = FormDef.select(order_by='name', ignore_errors=True, lightweight=True)
|
all_formdefs = FormDef.select(order_by='name', ignore_errors=True, lightweight=True)
|
||||||
for category in categories:
|
for category in categories:
|
||||||
d = {}
|
d = {}
|
||||||
d['title'] = unicode(category.name, charset)
|
d['title'] = force_text(category.name, charset)
|
||||||
d['slug'] = category.url_name
|
d['slug'] = category.url_name
|
||||||
d['url'] = category.get_url()
|
d['url'] = category.get_url()
|
||||||
if category.description:
|
if category.description:
|
||||||
d['description'] = unicode(str(category.get_description_html_text()), charset)
|
d['description'] = force_text(str(category.get_description_html_text()), charset)
|
||||||
formdefs = ApiFormdefsDirectory(category).get_list_forms(user,
|
formdefs = ApiFormdefsDirectory(category).get_list_forms(user,
|
||||||
formdefs=all_formdefs, list_all_forms=list_all_forms,
|
formdefs=all_formdefs, list_all_forms=list_all_forms,
|
||||||
backoffice_submission=backoffice_submission)
|
backoffice_submission=backoffice_submission)
|
||||||
|
@ -596,7 +593,7 @@ class ApiCategoriesDirectory(Directory):
|
||||||
for formdef in formdefs:
|
for formdef in formdefs:
|
||||||
for keyword in formdef['keywords']:
|
for keyword in formdef['keywords']:
|
||||||
keywords[keyword] = True
|
keywords[keyword] = True
|
||||||
d['keywords'] = keywords.keys()
|
d['keywords'] = list(keywords.keys())
|
||||||
if get_request().form.get('full') == 'on':
|
if get_request().form.get('full') == 'on':
|
||||||
d['forms'] = formdefs
|
d['forms'] = formdefs
|
||||||
list_categories.append(d)
|
list_categories.append(d)
|
||||||
|
@ -650,7 +647,7 @@ class ApiUserDirectory(Directory):
|
||||||
for formdef in formdefs:
|
for formdef in formdefs:
|
||||||
user_forms.extend(formdef.data_class().get_with_indexed_value(
|
user_forms.extend(formdef.data_class().get_with_indexed_value(
|
||||||
'user_id', user.id))
|
'user_id', user.id))
|
||||||
user_forms.sort(lambda x, y: cmp(x.receipt_time, y.receipt_time))
|
user_forms.sort(key=lambda x: x.receipt_time)
|
||||||
if get_request().form.get('sort') == 'desc':
|
if get_request().form.get('sort') == 'desc':
|
||||||
user_forms.reverse()
|
user_forms.reverse()
|
||||||
return user_forms
|
return user_forms
|
||||||
|
@ -710,9 +707,7 @@ class ApiUserDirectory(Directory):
|
||||||
formdata_dict['readable'] = getattr(form, 'readable', True)
|
formdata_dict['readable'] = getattr(form, 'readable', True)
|
||||||
result.append(formdata_dict)
|
result.append(formdata_dict)
|
||||||
|
|
||||||
return json.dumps({'err': 0, 'data': result},
|
return json.dumps({'err': 0, 'data': result}, cls=misc.JSONEncoder)
|
||||||
cls=misc.JSONEncoder,
|
|
||||||
encoding=get_publisher().site_charset)
|
|
||||||
|
|
||||||
|
|
||||||
class ApiUsersDirectory(Directory):
|
class ApiUsersDirectory(Directory):
|
||||||
|
|
|
@ -23,6 +23,8 @@ import os
|
||||||
import errno
|
import errno
|
||||||
import calendar
|
import calendar
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_bytes, force_text
|
||||||
from django.utils.six.moves.urllib import parse as urllib
|
from django.utils.six.moves.urllib import parse as urllib
|
||||||
from django.utils.six.moves.urllib import parse as urlparse
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
|
|
||||||
|
@ -40,27 +42,28 @@ def is_url_signed(utcnow=None, duration=DEFAULT_DURATION):
|
||||||
if not query_string:
|
if not query_string:
|
||||||
return False
|
return False
|
||||||
signature = get_request().form.get('signature')
|
signature = get_request().form.get('signature')
|
||||||
if not isinstance(signature, basestring):
|
if not isinstance(signature, six.string_types):
|
||||||
return False
|
return False
|
||||||
|
signature = force_bytes(signature)
|
||||||
# verify signature
|
# verify signature
|
||||||
orig = get_request().form.get('orig')
|
orig = get_request().form.get('orig')
|
||||||
if not isinstance(orig, basestring):
|
if not isinstance(orig, six.string_types):
|
||||||
raise AccessForbiddenError('missing/multiple orig field')
|
raise AccessForbiddenError('missing/multiple orig field')
|
||||||
key = get_publisher().get_site_option(orig, 'api-secrets')
|
key = get_publisher().get_site_option(orig, 'api-secrets')
|
||||||
if not key:
|
if not key:
|
||||||
raise AccessForbiddenError('invalid orig')
|
raise AccessForbiddenError('invalid orig')
|
||||||
algo = get_request().form.get('algo')
|
algo = get_request().form.get('algo')
|
||||||
if not isinstance(algo, basestring):
|
if not isinstance(algo, six.string_types):
|
||||||
raise AccessForbiddenError('missing/multiple algo field')
|
raise AccessForbiddenError('missing/multiple algo field')
|
||||||
try:
|
try:
|
||||||
algo = getattr(hashlib, algo)
|
algo = getattr(hashlib, algo)
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
raise AccessForbiddenError('invalid algo')
|
raise AccessForbiddenError('invalid algo')
|
||||||
if signature != base64.standard_b64encode(
|
if signature != base64.standard_b64encode(
|
||||||
hmac.new(key, query_string[:query_string.find('&signature=')], algo).digest()):
|
hmac.new(force_bytes(key), force_bytes(query_string[:query_string.find('&signature=')]), algo).digest()):
|
||||||
raise AccessForbiddenError('invalid signature')
|
raise AccessForbiddenError('invalid signature')
|
||||||
timestamp = get_request().form.get('timestamp')
|
timestamp = get_request().form.get('timestamp')
|
||||||
if not isinstance(timestamp, basestring):
|
if not isinstance(timestamp, six.string_types):
|
||||||
raise AccessForbiddenError('missing/multiple timestamp field')
|
raise AccessForbiddenError('missing/multiple timestamp field')
|
||||||
try:
|
try:
|
||||||
timestamp = datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')
|
timestamp = datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')
|
||||||
|
@ -106,7 +109,7 @@ def get_user_from_api_query_string(api_name=None):
|
||||||
# we do not handle other authentication schemes
|
# we do not handle other authentication schemes
|
||||||
raise AccessForbiddenError('unhandled authorization header')
|
raise AccessForbiddenError('unhandled authorization header')
|
||||||
auth_header = auth_header.split(' ', 1)[1]
|
auth_header = auth_header.split(' ', 1)[1]
|
||||||
username, password = base64.decodestring(auth_header).split(':', 1)
|
username, password = force_text(base64.decodestring(force_bytes(auth_header))).split(':', 1)
|
||||||
configured_password = get_publisher().get_site_option(
|
configured_password = get_publisher().get_site_option(
|
||||||
username, section='api-http-auth-%s' % api_name)
|
username, section='api-http-auth-%s' % api_name)
|
||||||
if configured_password != password:
|
if configured_password != password:
|
||||||
|
@ -118,7 +121,7 @@ def get_user_from_api_query_string(api_name=None):
|
||||||
user = None
|
user = None
|
||||||
if get_request().form.get('email'):
|
if get_request().form.get('email'):
|
||||||
email = get_request().form.get('email')
|
email = get_request().form.get('email')
|
||||||
if not isinstance(email, basestring):
|
if not isinstance(email, six.string_types):
|
||||||
raise AccessForbiddenError('multiple email field')
|
raise AccessForbiddenError('multiple email field')
|
||||||
users = list(get_publisher().user_class.get_users_with_email(email))
|
users = list(get_publisher().user_class.get_users_with_email(email))
|
||||||
if users:
|
if users:
|
||||||
|
@ -127,7 +130,7 @@ def get_user_from_api_query_string(api_name=None):
|
||||||
raise AccessForbiddenError('unknown email')
|
raise AccessForbiddenError('unknown email')
|
||||||
elif get_request().form.get('NameID'):
|
elif get_request().form.get('NameID'):
|
||||||
ni = get_request().form.get('NameID')
|
ni = get_request().form.get('NameID')
|
||||||
if not isinstance(ni, basestring):
|
if not isinstance(ni, six.string_types):
|
||||||
raise AccessForbiddenError('multiple NameID field')
|
raise AccessForbiddenError('multiple NameID field')
|
||||||
users = list(get_publisher().user_class.get_users_with_name_identifier(ni))
|
users = list(get_publisher().user_class.get_users_with_name_identifier(ni))
|
||||||
if users:
|
if users:
|
||||||
|
@ -168,7 +171,7 @@ def sign_query(query, key, algo='sha256', timestamp=None, nonce=None):
|
||||||
|
|
||||||
def sign_string(s, key, algo='sha256', timedelta=30):
|
def sign_string(s, key, algo='sha256', timedelta=30):
|
||||||
digestmod = getattr(hashlib, algo)
|
digestmod = getattr(hashlib, algo)
|
||||||
hash = hmac.HMAC(key, digestmod=digestmod, msg=s)
|
hash = hmac.HMAC(force_bytes(key), digestmod=digestmod, msg=force_bytes(s))
|
||||||
return hash.digest()
|
return hash.digest()
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -26,14 +26,16 @@ try:
|
||||||
except ImportError:
|
except ImportError:
|
||||||
xlwt = None
|
xlwt = None
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_text
|
||||||
from django.utils.six.moves.urllib import parse as urllib
|
from django.utils.six.moves.urllib import parse as urllib
|
||||||
from django.utils.six import StringIO
|
from django.utils.six import BytesIO, StringIO
|
||||||
|
|
||||||
from quixote import get_session, get_publisher, get_request, get_response, redirect
|
from quixote import get_session, get_publisher, get_request, get_response, redirect
|
||||||
from quixote.directory import Directory
|
from quixote.directory import Directory
|
||||||
from quixote.html import TemplateIO, htmltext, htmlescape
|
from quixote.html import TemplateIO, htmltext, htmlescape
|
||||||
|
|
||||||
from ..qommon import _, ngettext, ezt
|
from ..qommon import _, ngettext, ezt, force_str
|
||||||
from ..qommon.admin.emails import EmailsDirectory
|
from ..qommon.admin.emails import EmailsDirectory
|
||||||
from ..qommon.admin.menu import command_icon
|
from ..qommon.admin.menu import command_icon
|
||||||
from ..qommon.backoffice.menu import html_top
|
from ..qommon.backoffice.menu import html_top
|
||||||
|
@ -263,7 +265,7 @@ class UserViewDirectory(Directory):
|
||||||
categories[formdata.formdef.category_id] = formdata.formdef.category
|
categories[formdata.formdef.category_id] = formdata.formdef.category
|
||||||
formdata_by_category[formdata.formdef.category_id] = []
|
formdata_by_category[formdata.formdef.category_id] = []
|
||||||
formdata_by_category[formdata.formdef.category_id].append(formdata)
|
formdata_by_category[formdata.formdef.category_id].append(formdata)
|
||||||
cats = categories.values()
|
cats = list(categories.values())
|
||||||
Category.sort_by_position(cats)
|
Category.sort_by_position(cats)
|
||||||
for cat in cats:
|
for cat in cats:
|
||||||
r += htmltext('<div class="bo-block">')
|
r += htmltext('<div class="bo-block">')
|
||||||
|
@ -337,10 +339,8 @@ class UsersViewDirectory(Directory):
|
||||||
|
|
||||||
r += htmltext('<h3>%s</h3>') % _('Search')
|
r += htmltext('<h3>%s</h3>') % _('Search')
|
||||||
if get_request().form.get('q'):
|
if get_request().form.get('q'):
|
||||||
q = get_request().form.get('q')
|
q = force_text(get_request().form.get('q'))
|
||||||
if type(q) is not unicode:
|
r += htmltext('<input name="q" value="%s">') % force_str(q)
|
||||||
q = unicode(q, get_publisher().site_charset)
|
|
||||||
r += htmltext('<input name="q" value="%s">') % q.encode(get_publisher().site_charset)
|
|
||||||
else:
|
else:
|
||||||
r += htmltext('<input name="q">')
|
r += htmltext('<input name="q">')
|
||||||
r += htmltext('<input type="submit" value="%s"/>') % _('Search')
|
r += htmltext('<input type="submit" value="%s"/>') % _('Search')
|
||||||
|
@ -1179,10 +1179,8 @@ class FormPage(Directory):
|
||||||
if get_publisher().is_using_postgresql():
|
if get_publisher().is_using_postgresql():
|
||||||
r += htmltext('<h3>%s</h3>') % _('Search')
|
r += htmltext('<h3>%s</h3>') % _('Search')
|
||||||
if get_request().form.get('q'):
|
if get_request().form.get('q'):
|
||||||
q = get_request().form.get('q')
|
q = force_text(get_request().form.get('q'))
|
||||||
if type(q) is not unicode:
|
r += htmltext('<input class="inline-input" name="q" value="%s">') % force_str(q)
|
||||||
q = unicode(q, get_publisher().site_charset)
|
|
||||||
r += htmltext('<input class="inline-input" name="q" value="%s">') % q.encode(get_publisher().site_charset)
|
|
||||||
else:
|
else:
|
||||||
r += htmltext('<input class="inline-input" name="q">')
|
r += htmltext('<input class="inline-input" name="q">')
|
||||||
r += htmltext('<input type="submit" class="side-button" value="%s"/>') % _('Search')
|
r += htmltext('<input type="submit" class="side-button" value="%s"/>') % _('Search')
|
||||||
|
@ -1638,7 +1636,7 @@ class FormPage(Directory):
|
||||||
elem = elem[:32760] + ' [...]'
|
elem = elem[:32760] + ' [...]'
|
||||||
ws.write(i+1, j, elem)
|
ws.write(i+1, j, elem)
|
||||||
|
|
||||||
self.output = StringIO()
|
self.output = BytesIO()
|
||||||
w.save(self.output)
|
w.save(self.output)
|
||||||
|
|
||||||
if job:
|
if job:
|
||||||
|
@ -1698,7 +1696,7 @@ class FormPage(Directory):
|
||||||
data_field=item['field'],
|
data_field=item['field'],
|
||||||
native_value=item['native_value'])
|
native_value=item['native_value'])
|
||||||
|
|
||||||
self.output = StringIO()
|
self.output = BytesIO()
|
||||||
w.save(self.output)
|
w.save(self.output)
|
||||||
|
|
||||||
if job:
|
if job:
|
||||||
|
@ -1760,8 +1758,7 @@ class FormPage(Directory):
|
||||||
'receipt_time': filled.receipt_time,
|
'receipt_time': filled.receipt_time,
|
||||||
'last_update_time': filled.last_update_time} for filled in items]
|
'last_update_time': filled.last_update_time} for filled in items]
|
||||||
return json.dumps(output,
|
return json.dumps(output,
|
||||||
cls=misc.JSONEncoder,
|
cls=misc.JSONEncoder)
|
||||||
encoding=get_publisher().site_charset)
|
|
||||||
|
|
||||||
def geojson(self):
|
def geojson(self):
|
||||||
if not self.formdef.geolocations:
|
if not self.formdef.geolocations:
|
||||||
|
@ -1782,7 +1779,7 @@ class FormPage(Directory):
|
||||||
selected_filter, user=user, query=query, criterias=criterias)
|
selected_filter, user=user, query=query, criterias=criterias)
|
||||||
|
|
||||||
# only consider first key for now
|
# only consider first key for now
|
||||||
geoloc_key = self.formdef.geolocations.keys()[0]
|
geoloc_key = list(self.formdef.geolocations.keys())[0]
|
||||||
return json.dumps(geojson_formdatas(items, fields=fields))
|
return json.dumps(geojson_formdatas(items, fields=fields))
|
||||||
|
|
||||||
def ics(self):
|
def ics(self):
|
||||||
|
@ -1842,21 +1839,21 @@ class FormPage(Directory):
|
||||||
get_request().get_server().lower(),
|
get_request().get_server().lower(),
|
||||||
formdef.url_name,
|
formdef.url_name,
|
||||||
formdata.id)
|
formdata.id)
|
||||||
vevent.add('summary').value = unicode(formdata.get_display_name(), charset)
|
vevent.add('summary').value = force_text(formdata.get_display_name(), charset)
|
||||||
vevent.add('dtstart').value = make_datetime(formdata.data[start_date_field_id])
|
vevent.add('dtstart').value = make_datetime(formdata.data[start_date_field_id])
|
||||||
if end_date_field_id and formdata.data.get(end_date_field_id):
|
if end_date_field_id and formdata.data.get(end_date_field_id):
|
||||||
vevent.add('dtend').value = make_datetime(formdata.data[end_date_field_id])
|
vevent.add('dtend').value = make_datetime(formdata.data[end_date_field_id])
|
||||||
vevent.dtstart.value_param = 'DATE'
|
vevent.dtstart.value_param = 'DATE'
|
||||||
backoffice_url = formdata.get_url(backoffice=True)
|
backoffice_url = formdata.get_url(backoffice=True)
|
||||||
vevent.add('url').value = backoffice_url
|
vevent.add('url').value = backoffice_url
|
||||||
form_name = unicode(formdef.name, charset)
|
form_name = force_text(formdef.name, charset)
|
||||||
status_name = unicode(formdata.get_status_label(), charset)
|
status_name = force_text(formdata.get_status_label(), charset)
|
||||||
description = '%s | %s | %s\n' % (form_name, formdata.get_display_id(), status_name)
|
description = '%s | %s | %s\n' % (form_name, formdata.get_display_id(), status_name)
|
||||||
description += backoffice_url
|
description += backoffice_url
|
||||||
# TODO: improve performance by loading all users in one
|
# TODO: improve performance by loading all users in one
|
||||||
# single query before the loop
|
# single query before the loop
|
||||||
if formdata.user:
|
if formdata.user:
|
||||||
description += '\n%s' % unicode(formdata.user.get_display_name(), charset)
|
description += '\n%s' % force_text(formdata.user.get_display_name(), charset)
|
||||||
vevent.add('description').value = description
|
vevent.add('description').value = description
|
||||||
cal.add(vevent)
|
cal.add(vevent)
|
||||||
|
|
||||||
|
@ -2066,13 +2063,13 @@ class FormPage(Directory):
|
||||||
r += htmltext(' <li>%s %s</li>') % (_('Minimum Time:'), format_time(min_times))
|
r += htmltext(' <li>%s %s</li>') % (_('Minimum Time:'), format_time(min_times))
|
||||||
r += htmltext(' <li>%s %s</li>') % (_('Maximum Time:'), format_time(max_times))
|
r += htmltext(' <li>%s %s</li>') % (_('Maximum Time:'), format_time(max_times))
|
||||||
r += htmltext(' <li>%s %s</li>') % (_('Range:'), format_time(max_times - min_times))
|
r += htmltext(' <li>%s %s</li>') % (_('Range:'), format_time(max_times - min_times))
|
||||||
mean = sum_times / len_times
|
mean = sum_times // len_times
|
||||||
r += htmltext(' <li>%s %s</li>') % (_('Mean:'), format_time(mean))
|
r += htmltext(' <li>%s %s</li>') % (_('Mean:'), format_time(mean))
|
||||||
if len_times % 2:
|
if len_times % 2:
|
||||||
median = res_time_forms[len_times/2]
|
median = res_time_forms[len_times//2]
|
||||||
else:
|
else:
|
||||||
midpt = len_times/2
|
midpt = len_times//2
|
||||||
median = (res_time_forms[midpt-1]+res_time_forms[midpt])/2
|
median = (res_time_forms[midpt-1]+res_time_forms[midpt])//2
|
||||||
r += htmltext(' <li>%s %s</li>') % (_('Median:'), format_time(median))
|
r += htmltext(' <li>%s %s</li>') % (_('Median:'), format_time(median))
|
||||||
|
|
||||||
# variance...
|
# variance...
|
||||||
|
@ -2080,7 +2077,7 @@ class FormPage(Directory):
|
||||||
for t in res_time_forms:
|
for t in res_time_forms:
|
||||||
x += (t - mean)**2.0
|
x += (t - mean)**2.0
|
||||||
try:
|
try:
|
||||||
variance = x/(len_times+1)
|
variance = x//(len_times+1)
|
||||||
except:
|
except:
|
||||||
variance = 0
|
variance = 0
|
||||||
# not displayed since in square seconds which is not easy to grasp
|
# not displayed since in square seconds which is not easy to grasp
|
||||||
|
@ -2323,7 +2320,7 @@ class FormBackOfficeStatusPage(FormStatusPage):
|
||||||
categories[formdata.formdef.category_id] = formdata.formdef.category
|
categories[formdata.formdef.category_id] = formdata.formdef.category
|
||||||
formdata_by_category[formdata.formdef.category_id] = []
|
formdata_by_category[formdata.formdef.category_id] = []
|
||||||
formdata_by_category[formdata.formdef.category_id].append(formdata)
|
formdata_by_category[formdata.formdef.category_id].append(formdata)
|
||||||
cats = categories.values()
|
cats = list(categories.values())
|
||||||
Category.sort_by_position(cats)
|
Category.sort_by_position(cats)
|
||||||
if self.formdef.category_id in categories:
|
if self.formdef.category_id in categories:
|
||||||
# move current category to the top
|
# move current category to the top
|
||||||
|
@ -2525,12 +2522,12 @@ class FormBackOfficeStatusPage(FormStatusPage):
|
||||||
def safe(v):
|
def safe(v):
|
||||||
if isinstance(v, str):
|
if isinstance(v, str):
|
||||||
try:
|
try:
|
||||||
unicode(v, charset)
|
force_text(v, charset)
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
v = repr(v)
|
v = repr(v)
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
v = unicode(v).encode(charset)
|
v = force_text(v).encode(charset)
|
||||||
except:
|
except:
|
||||||
v = repr(v)
|
v = repr(v)
|
||||||
return v
|
return v
|
||||||
|
@ -2552,7 +2549,7 @@ class FormBackOfficeStatusPage(FormStatusPage):
|
||||||
k = safe(k)
|
k = safe(k)
|
||||||
r += htmltext('<li><code title="%s">%s</code>') % (k, k)
|
r += htmltext('<li><code title="%s">%s</code>') % (k, k)
|
||||||
r += htmltext(' <div class="value"><span>%s</span>') % ellipsize(safe(v), 10000)
|
r += htmltext(' <div class="value"><span>%s</span>') % ellipsize(safe(v), 10000)
|
||||||
if not isinstance(v, basestring):
|
if not isinstance(v, six.string_types):
|
||||||
r += htmltext(' <span class="type">(%r)</span>') % type(v)
|
r += htmltext(' <span class="type">(%r)</span>') % type(v)
|
||||||
r += htmltext('</div></li>')
|
r += htmltext('</div></li>')
|
||||||
r += htmltext('</div>')
|
r += htmltext('</div>')
|
||||||
|
|
|
@ -29,7 +29,6 @@ from ..qommon.form import *
|
||||||
|
|
||||||
from wcs.formdef import FormDef
|
from wcs.formdef import FormDef
|
||||||
|
|
||||||
import wcs.admin.bounces
|
|
||||||
import wcs.admin.categories
|
import wcs.admin.categories
|
||||||
import wcs.admin.forms
|
import wcs.admin.forms
|
||||||
import wcs.admin.roles
|
import wcs.admin.roles
|
||||||
|
@ -47,7 +46,6 @@ from . import data_management
|
||||||
class RootDirectory(BackofficeRootDirectory):
|
class RootDirectory(BackofficeRootDirectory):
|
||||||
_q_exports = ['', 'pending', 'statistics', ('menu.json', 'menu_json')]
|
_q_exports = ['', 'pending', 'statistics', ('menu.json', 'menu_json')]
|
||||||
|
|
||||||
bounces = wcs.admin.bounces.BouncesDirectory()
|
|
||||||
forms = wcs.admin.forms.FormsDirectory()
|
forms = wcs.admin.forms.FormsDirectory()
|
||||||
roles = wcs.admin.roles.RolesDirectory()
|
roles = wcs.admin.roles.RolesDirectory()
|
||||||
settings = wcs.admin.settings.SettingsDirectory()
|
settings = wcs.admin.settings.SettingsDirectory()
|
||||||
|
@ -69,7 +67,6 @@ class RootDirectory(BackofficeRootDirectory):
|
||||||
('workflows/', N_('Workflows Workshop'), {'sub': True}),
|
('workflows/', N_('Workflows Workshop'), {'sub': True}),
|
||||||
('users/', N_('Users'), {'check_display_function': roles.is_visible}),
|
('users/', N_('Users'), {'check_display_function': roles.is_visible}),
|
||||||
('roles/', N_('Roles'), {'check_display_function': roles.is_visible}),
|
('roles/', N_('Roles'), {'check_display_function': roles.is_visible}),
|
||||||
('bounces/', N_('Bounces'), {'check_display_function': bounces.is_visible}),
|
|
||||||
('settings/', N_('Settings')),
|
('settings/', N_('Settings')),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
|
@ -159,7 +159,7 @@ class FormFillPage(PublicFormFillPage):
|
||||||
r += htmltext('<div class="submit-channel-selection" style="display: none;">')
|
r += htmltext('<div class="submit-channel-selection" style="display: none;">')
|
||||||
r += htmltext('<h3>%s</h3>') % _('Channel')
|
r += htmltext('<h3>%s</h3>') % _('Channel')
|
||||||
r += htmltext('<select>')
|
r += htmltext('<select>')
|
||||||
for channel_key, channel_label in [('', '-')] + FormData.get_submission_channels().items():
|
for channel_key, channel_label in [('', '-')] + list(FormData.get_submission_channels().items()):
|
||||||
selected = ''
|
selected = ''
|
||||||
if self.selected_submission_channel == channel_key:
|
if self.selected_submission_channel == channel_key:
|
||||||
selected = 'selected="selected"'
|
selected = 'selected="selected"'
|
||||||
|
|
|
@ -14,6 +14,8 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
|
||||||
from wcs.formdata import FormData
|
from wcs.formdata import FormData
|
||||||
|
|
||||||
|
|
||||||
|
@ -39,6 +41,6 @@ class CardData(FormData):
|
||||||
if not field.varname:
|
if not field.varname:
|
||||||
continue
|
continue
|
||||||
value = self.data and self.data.get(field.id)
|
value = self.data and self.data.get(field.id)
|
||||||
if isinstance(value, basestring):
|
if isinstance(value, six.string_types):
|
||||||
item[field.varname] = value
|
item[field.varname] = value
|
||||||
return item
|
return item
|
||||||
|
|
|
@ -14,8 +14,8 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import new
|
|
||||||
import sys
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
from quixote import get_publisher
|
from quixote import get_publisher
|
||||||
from .qommon import _
|
from .qommon import _
|
||||||
|
@ -24,6 +24,9 @@ from wcs.carddata import CardData
|
||||||
from wcs.formdef import FormDef
|
from wcs.formdef import FormDef
|
||||||
from wcs.workflows import Workflow
|
from wcs.workflows import Workflow
|
||||||
|
|
||||||
|
if not hasattr(types, 'ClassType'):
|
||||||
|
types.ClassType = type
|
||||||
|
|
||||||
|
|
||||||
class CardDef(FormDef):
|
class CardDef(FormDef):
|
||||||
_names = 'carddefs'
|
_names = 'carddefs'
|
||||||
|
@ -45,12 +48,12 @@ class CardDef(FormDef):
|
||||||
if (get_publisher().is_using_postgresql() and not mode == 'files') or mode == 'sql':
|
if (get_publisher().is_using_postgresql() and not mode == 'files') or mode == 'sql':
|
||||||
from . import sql
|
from . import sql
|
||||||
table_name = sql.get_formdef_table_name(self)
|
table_name = sql.get_formdef_table_name(self)
|
||||||
cls = new.classobj(self.url_name.title(), (sql.SqlCardData,),
|
cls = types.ClassType(self.url_name.title(), (sql.SqlCardData,),
|
||||||
{'_formdef': self,
|
{'_formdef': self,
|
||||||
'_table_name': table_name})
|
'_table_name': table_name})
|
||||||
actions = sql.do_formdef_tables(self)
|
actions = sql.do_formdef_tables(self)
|
||||||
else:
|
else:
|
||||||
cls = new.classobj(self.url_name.title(), (CardData,),
|
cls = types.ClassType(self.url_name.title(), (CardData,),
|
||||||
{'_names': 'card-%s' % self.internal_identifier,
|
{'_names': 'card-%s' % self.internal_identifier,
|
||||||
'_formdef': self})
|
'_formdef': self})
|
||||||
actions = []
|
actions = []
|
||||||
|
@ -78,7 +81,7 @@ class CardDef(FormDef):
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_default_workflow(cls):
|
def get_default_workflow(cls):
|
||||||
from wcs.workflows import EditableWorkflowStatusItem, ChoiceWorkflowStatusItem
|
from wcs.workflows import EditableWorkflowStatusItem, ChoiceWorkflowStatusItem
|
||||||
import wf.remove
|
from wcs.wf.remove import RemoveWorkflowStatusItem
|
||||||
workflow = Workflow(name=_('Default (cards)'))
|
workflow = Workflow(name=_('Default (cards)'))
|
||||||
workflow.id = '_carddef_default'
|
workflow.id = '_carddef_default'
|
||||||
workflow.roles = {
|
workflow.roles = {
|
||||||
|
@ -105,7 +108,7 @@ class CardDef(FormDef):
|
||||||
action_delete.parent = status
|
action_delete.parent = status
|
||||||
status.items.append(action_delete)
|
status.items.append(action_delete)
|
||||||
|
|
||||||
remove = wf.remove.RemoveWorkflowStatusItem()
|
remove = RemoveWorkflowStatusItem()
|
||||||
remove.id = '_remove'
|
remove.id = '_remove'
|
||||||
remove.parent = deleted_status
|
remove.parent = deleted_status
|
||||||
deleted_status.items.append(remove)
|
deleted_status.items.append(remove)
|
||||||
|
|
|
@ -66,21 +66,8 @@ class Category(XmlStorableObject):
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def sort_by_position(cls, categories):
|
def sort_by_position(cls, categories):
|
||||||
def cmp_position(x, y):
|
# move categories with no defined position to the end
|
||||||
if x is None and y is None:
|
categories.sort(key=lambda x: x.position if x and x.position is not None else 10000)
|
||||||
return 0
|
|
||||||
if y is None:
|
|
||||||
return -1
|
|
||||||
if x is None:
|
|
||||||
return 1
|
|
||||||
if x.position == y.position:
|
|
||||||
return 0
|
|
||||||
if x.position is None:
|
|
||||||
return 1
|
|
||||||
if y.position is None:
|
|
||||||
return -1
|
|
||||||
return cmp(x.position, y.position)
|
|
||||||
categories.sort(cmp_position)
|
|
||||||
|
|
||||||
def remove_self(self):
|
def remove_self(self):
|
||||||
from .formdef import FormDef
|
from .formdef import FormDef
|
||||||
|
|
|
@ -19,6 +19,7 @@ import os
|
||||||
from threading import Lock
|
from threading import Lock
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
from django.utils.six.moves import configparser as ConfigParser
|
from django.utils.six.moves import configparser as ConfigParser
|
||||||
|
|
||||||
from quixote import get_publisher, get_request
|
from quixote import get_publisher, get_request
|
||||||
|
@ -31,7 +32,7 @@ from django.template import loader, TemplateDoesNotExist
|
||||||
from django.template.response import TemplateResponse
|
from django.template.response import TemplateResponse
|
||||||
from django.views.generic.base import TemplateView
|
from django.views.generic.base import TemplateView
|
||||||
|
|
||||||
from .qommon import template
|
from .qommon import force_str, template
|
||||||
from .qommon.publisher import get_cfg, set_publisher_class
|
from .qommon.publisher import get_cfg, set_publisher_class
|
||||||
from .publisher import WcsPublisher
|
from .publisher import WcsPublisher
|
||||||
from .qommon.http_request import HTTPRequest
|
from .qommon.http_request import HTTPRequest
|
||||||
|
@ -97,8 +98,8 @@ class CompatHTTPRequest(HTTPRequest):
|
||||||
self.django_request.quixote_request = self
|
self.django_request.quixote_request = self
|
||||||
self.response = None
|
self.response = None
|
||||||
request.environ['SCRIPT_NAME'] = str(request.environ['SCRIPT_NAME'])
|
request.environ['SCRIPT_NAME'] = str(request.environ['SCRIPT_NAME'])
|
||||||
request.environ['PATH_INFO'] = request.environ['PATH_INFO'].encode('utf-8')
|
request.environ['PATH_INFO'] = force_str(request.environ['PATH_INFO'])
|
||||||
self.META = self.django_request.META
|
self.environ = self.django_request.META
|
||||||
HTTPRequest.__init__(self, None, request.environ)
|
HTTPRequest.__init__(self, None, request.environ)
|
||||||
self.scheme = str(self.django_request.scheme)
|
self.scheme = str(self.django_request.scheme)
|
||||||
|
|
||||||
|
@ -117,23 +118,32 @@ class CompatHTTPRequest(HTTPRequest):
|
||||||
if not self.form:
|
if not self.form:
|
||||||
self.form = {}
|
self.form = {}
|
||||||
for k in self.django_request.POST:
|
for k in self.django_request.POST:
|
||||||
|
v = self.django_request.POST[k]
|
||||||
if k.endswith('[]'):
|
if k.endswith('[]'):
|
||||||
v = [x.encode(site_charset) for x in self.django_request.POST.getlist(k)]
|
v = [x for x in self.django_request.POST.getlist(k)]
|
||||||
else:
|
if six.PY2:
|
||||||
v = self.django_request.POST[k]
|
if k.endswith('[]'):
|
||||||
if isinstance(v, unicode):
|
v = [x.encode(site_charset) for x in v]
|
||||||
v = v.encode(site_charset)
|
else:
|
||||||
if isinstance(k, unicode):
|
v = self.django_request.POST[k]
|
||||||
k = k.encode(site_charset)
|
if isinstance(v, unicode):
|
||||||
|
v = v.encode(site_charset)
|
||||||
|
if isinstance(k, unicode):
|
||||||
|
k = k.encode(site_charset)
|
||||||
self.form[k] = v
|
self.form[k] = v
|
||||||
|
|
||||||
for k, upload_file in self.django_request.FILES.items():
|
for k, upload_file in self.django_request.FILES.items():
|
||||||
upload = Upload(upload_file.name.encode('utf-8'),
|
if six.PY2:
|
||||||
upload_file.content_type.encode('utf-8'),
|
upload = Upload(upload_file.name.encode('utf-8'),
|
||||||
upload_file.charset)
|
upload_file.content_type.encode('utf-8'),
|
||||||
|
upload_file.charset)
|
||||||
|
if isinstance(k, unicode):
|
||||||
|
k = k.encode(site_charset)
|
||||||
|
else:
|
||||||
|
upload = Upload(upload_file.name,
|
||||||
|
upload_file.content_type,
|
||||||
|
upload_file.charset)
|
||||||
upload.fp = upload_file.file
|
upload.fp = upload_file.file
|
||||||
if isinstance(k, unicode):
|
|
||||||
k = k.encode(site_charset)
|
|
||||||
self.form[k] = upload
|
self.form[k] = upload
|
||||||
|
|
||||||
def build_absolute_uri(self):
|
def build_absolute_uri(self):
|
||||||
|
|
|
@ -18,9 +18,9 @@ import sys
|
||||||
|
|
||||||
from quixote import get_publisher
|
from quixote import get_publisher
|
||||||
from django.template import Context, Template, TemplateSyntaxError
|
from django.template import Context, Template, TemplateSyntaxError
|
||||||
|
from django.utils.encoding import force_text
|
||||||
|
|
||||||
from .qommon import _, get_logger
|
from .qommon import _, get_logger, force_str
|
||||||
from .qommon.misc import site_encode
|
|
||||||
|
|
||||||
|
|
||||||
class ValidationError(ValueError):
|
class ValidationError(ValueError):
|
||||||
|
@ -90,11 +90,11 @@ class Condition(object):
|
||||||
try:
|
try:
|
||||||
compile(self.value, '<string>', 'eval')
|
compile(self.value, '<string>', 'eval')
|
||||||
except (SyntaxError, TypeError) as e:
|
except (SyntaxError, TypeError) as e:
|
||||||
raise ValidationError(_('syntax error: %s') % site_encode(e))
|
raise ValidationError(_('syntax error: %s') % force_str(force_text(e)))
|
||||||
|
|
||||||
def validate_django(self):
|
def validate_django(self):
|
||||||
try:
|
try:
|
||||||
Template('{%% load %s %%}{%% if %s %%}OK{%% endif %%}' % (
|
Template('{%% load %s %%}{%% if %s %%}OK{%% endif %%}' % (
|
||||||
get_publisher().get_default_templatetags_libraries(), self.value))
|
get_publisher().get_default_templatetags_libraries(), self.value))
|
||||||
except TemplateSyntaxError as e:
|
except TemplateSyntaxError as e:
|
||||||
raise ValidationError(_('syntax error: %s') % site_encode(e))
|
raise ValidationError(_('syntax error: %s') % force_str(force_text(e)))
|
||||||
|
|
|
@ -1,68 +0,0 @@
|
||||||
# Copyright (C) 1998-2006 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
|
|
||||||
# USA.
|
|
||||||
|
|
||||||
"""Contains all the common functionality for msg bounce scanning API.
|
|
||||||
|
|
||||||
This module can also be used as the basis for a bounce detection testing
|
|
||||||
framework. When run as a script, it expects two arguments, the listname and
|
|
||||||
the filename containing the bounce message.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
|
|
||||||
# If a bounce detector returns Stop, that means to just discard the message.
|
|
||||||
# An example is warning messages for temporary delivery problems. These
|
|
||||||
# shouldn't trigger a bounce notification, but we also don't want to send them
|
|
||||||
# on to the list administrator.
|
|
||||||
class _Stop:
|
|
||||||
pass
|
|
||||||
Stop = _Stop()
|
|
||||||
|
|
||||||
|
|
||||||
BOUNCE_PIPELINE = [
|
|
||||||
'DSN',
|
|
||||||
'Qmail',
|
|
||||||
'Postfix',
|
|
||||||
'Yahoo',
|
|
||||||
'Caiwireless',
|
|
||||||
'Exchange',
|
|
||||||
'Exim',
|
|
||||||
'Netscape',
|
|
||||||
'Compuserve',
|
|
||||||
'Microsoft',
|
|
||||||
'GroupWise',
|
|
||||||
'SMTP32',
|
|
||||||
'SimpleMatch',
|
|
||||||
'SimpleWarning',
|
|
||||||
'Yale',
|
|
||||||
'LLNL',
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# msg must be a mimetools.Message
|
|
||||||
def ScanMessages(mlist, msg):
|
|
||||||
for module in BOUNCE_PIPELINE:
|
|
||||||
modname = 'Mailman.Bouncers.' + module
|
|
||||||
__import__(modname)
|
|
||||||
addrs = sys.modules[modname].process(msg)
|
|
||||||
if addrs:
|
|
||||||
# Return addrs even if it is Stop. BounceRunner needs this info.
|
|
||||||
return addrs
|
|
||||||
return []
|
|
|
@ -1,45 +0,0 @@
|
||||||
# Copyright (C) 1998,1999,2000,2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Parse mystery style generated by MTA at caiwireless.net."""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import email
|
|
||||||
from cStringIO import StringIO
|
|
||||||
|
|
||||||
tcre = re.compile(r'the following recipients did not receive this message:',
|
|
||||||
re.IGNORECASE)
|
|
||||||
acre = re.compile(r'<(?P<addr>[^>]*)>')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
|
|
||||||
if msg.get_content_type() <> 'multipart/mixed':
|
|
||||||
return None
|
|
||||||
# simple state machine
|
|
||||||
# 0 == nothing seen
|
|
||||||
# 1 == tag line seen
|
|
||||||
state = 0
|
|
||||||
# This format thinks it's a MIME, but it really isn't
|
|
||||||
for line in email.Iterators.body_line_iterator(msg):
|
|
||||||
line = line.strip()
|
|
||||||
if state == 0 and tcre.match(line):
|
|
||||||
state = 1
|
|
||||||
elif state == 1 and line:
|
|
||||||
mo = acre.match(line)
|
|
||||||
if not mo:
|
|
||||||
return None
|
|
||||||
return [mo.group('addr')]
|
|
|
@ -1,45 +0,0 @@
|
||||||
# Copyright (C) 1998,1999,2000,2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Compuserve has its own weird format for bounces."""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import email
|
|
||||||
|
|
||||||
dcre = re.compile(r'your message could not be delivered', re.IGNORECASE)
|
|
||||||
acre = re.compile(r'Invalid receiver address: (?P<addr>.*)')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
|
|
||||||
# simple state machine
|
|
||||||
# 0 = nothing seen yet
|
|
||||||
# 1 = intro line seen
|
|
||||||
state = 0
|
|
||||||
addrs = []
|
|
||||||
for line in email.Iterators.body_line_iterator(msg):
|
|
||||||
if state == 0:
|
|
||||||
mo = dcre.search(line)
|
|
||||||
if mo:
|
|
||||||
state = 1
|
|
||||||
elif state == 1:
|
|
||||||
mo = dcre.search(line)
|
|
||||||
if mo:
|
|
||||||
break
|
|
||||||
mo = acre.search(line)
|
|
||||||
if mo:
|
|
||||||
addrs.append(mo.group('addr'))
|
|
||||||
return addrs
|
|
|
@ -1,101 +0,0 @@
|
||||||
# Copyright (C) 1998-2006 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
|
|
||||||
# USA.
|
|
||||||
|
|
||||||
"""Parse RFC 3464 (i.e. DSN) bounce formats.
|
|
||||||
|
|
||||||
RFC 3464 obsoletes 1894 which was the old DSN standard. This module has not
|
|
||||||
been audited for differences between the two.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from email.Iterators import typed_subpart_iterator
|
|
||||||
from email.Utils import parseaddr
|
|
||||||
from cStringIO import StringIO
|
|
||||||
|
|
||||||
from BouncerAPI import Stop
|
|
||||||
|
|
||||||
try:
|
|
||||||
True, False
|
|
||||||
except NameError:
|
|
||||||
True = 1
|
|
||||||
False = 0
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def check(msg):
    """Return bouncing addresses dug out of a DSN's delivery-status parts.

    Returns the Stop sentinel when the DSN reports a transient `delayed'
    action; otherwise returns a (possibly empty) list of unique addresses.
    """
    # Iterate over each message/delivery-status subpart
    addrs = []
    for part in typed_subpart_iterator(msg, 'message', 'delivery-status'):
        if not part.is_multipart():
            # Huh?
            continue
        # Each message/delivery-status contains a list of Message objects
        # which are the header blocks.  Iterate over those too.
        for msgblock in part.get_payload():
            # We try to dig out the Original-Recipient (which is optional) and
            # Final-Recipient (which is mandatory, but may not exactly match
            # an address on our list).  Some MTA's also use X-Actual-Recipient
            # as a synonym for Original-Recipient, but some apparently use
            # that for other purposes :(
            #
            # Also grok out Action so we can do something with that too.
            action = msgblock.get('action', '').lower()
            # Some MTAs have been observed that put comments on the action.
            if action.startswith('delayed'):
                # Transient failure: tell the caller to stop processing.
                return Stop
            if not action.startswith('fail'):
                # Some non-permanent failure, so ignore this block
                continue
            params = []
            foundp = False
            for header in ('original-recipient', 'final-recipient'):
                # get_params() splits `rfc822; addr' style header values;
                # the address shows up as a bare key with no value.
                for k, v in msgblock.get_params([], header):
                    if k.lower() == 'rfc822':
                        foundp = True
                    else:
                        params.append(k)
                if foundp:
                    # Note that params should already be unquoted.
                    addrs.extend(params)
                    break
                else:
                    # MAS: This is a kludge, but SMTP-GATEWAY01.intra.home.dk
                    # has a final-recipient with an angle-addr and no
                    # address-type parameter at all.  Non-compliant, but ...
                    for param in params:
                        if param.startswith('<') and param.endswith('>'):
                            addrs.append(param[1:-1])
    # Uniquify
    rtnaddrs = {}
    for a in addrs:
        if a is not None:
            # Strip any display name; keep only the bare address.
            realname, a = parseaddr(a)
            rtnaddrs[a] = True
    return rtnaddrs.keys()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Entry point: unwrap any disclaimer wrapper, then run check()."""
    # A DSN has been seen wrapped with a "legal disclaimer" by an
    # outgoing MTA in a multipart/mixed outer part; peel that off.
    if msg.is_multipart() and msg.get_content_subtype() == 'mixed':
        msg = msg.get_payload()[0]
    # The report-type parameter should be "delivery-status", but it
    # seems that some DSN generating MTAs don't include this on the
    # Content-Type: header, so only insist on multipart/report.
    if msg.is_multipart() and msg.get_content_subtype() == 'report':
        return check(msg)
    return None
|
|
|
@ -1,47 +0,0 @@
|
||||||
# Copyright (C) 2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Recognizes (some) Microsoft Exchange formats."""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import email.Iterators
|
|
||||||
|
|
||||||
scre = re.compile('did not reach the following recipient')
|
|
||||||
ecre = re.compile('MSEXCH:')
|
|
||||||
a1cre = re.compile('SMTP=(?P<addr>[^;]+); on ')
|
|
||||||
a2cre = re.compile('(?P<addr>[^ ]+) on ')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Extract bouncing addresses from an MS Exchange style bounce."""
    found = {}
    lines = email.Iterators.body_line_iterator(msg)
    # Skip ahead to the "did not reach the following recipient" marker;
    # if it never appears this isn't an Exchange bounce.
    for line in lines:
        if scre.search(line):
            break
    else:
        return []
    # Collect addresses until the MSEXCH: terminator line.
    for line in lines:
        if ecre.search(line):
            break
        mo = a1cre.search(line) or a2cre.search(line)
        if mo:
            found[mo.group('addr')] = 1
    return found.keys()
|
|
|
@ -1,30 +0,0 @@
|
||||||
# Copyright (C) 1998,1999,2000,2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Parse bounce messages generated by Exim.
|
|
||||||
|
|
||||||
Exim adds an X-Failed-Recipients: header to bounce messages containing
|
|
||||||
an `addresslist' of failed addresses.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
from email.Utils import getaddresses
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Return the addresses listed in Exim's X-Failed-Recipients: headers."""
    headers = msg.get_all('x-failed-recipients', [])
    addrs = []
    # getaddresses() handles both bare addresses and `Name <addr>' forms.
    for realname, addr in getaddresses(headers):
        addrs.append(addr)
    return addrs
|
|
|
@ -1,70 +0,0 @@
|
||||||
# Copyright (C) 1998,1999,2000,2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""This appears to be the format for Novell GroupWise and NTMail
|
|
||||||
|
|
||||||
X-Mailer: Novell GroupWise Internet Agent 5.5.3.1
|
|
||||||
X-Mailer: NTMail v4.30.0012
|
|
||||||
X-Mailer: Internet Mail Service (5.5.2653.19)
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
from email.Message import Message
|
|
||||||
from cStringIO import StringIO
|
|
||||||
|
|
||||||
acre = re.compile(r'<(?P<addr>[^>]*)>')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def find_textplain(msg):
    """Depth-first search for the first text/plain part of `msg'.

    Returns the matching Message object, or None when the tree
    contains no text/plain part.
    """
    if msg.get_content_type() == 'text/plain':
        return msg
    # BUG FIX: the original tested the bound method `msg.is_multipart'
    # (always truthy) instead of calling it, so non-multipart payloads
    # were iterated too -- and iterating a None payload raises
    # TypeError.
    if msg.is_multipart():
        for part in msg.get_payload():
            # Defensively skip any non-Message payload item.
            if not isinstance(part, Message):
                continue
            ret = find_textplain(part)
            if ret:
                return ret
    return None
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Parse GroupWise/NTMail style bounces for failing addresses."""
    if msg.get_content_type() != 'multipart/mixed' or not msg['x-mailer']:
        return None
    # The failure report lives in the first text/plain part.
    textplain = find_textplain(msg)
    if not textplain:
        return None
    addrs = {}
    for line in StringIO(textplain.get_payload()):
        mo = acre.search(line)
        if mo:
            addrs[mo.group('addr')] = 1
            continue
        if '@' not in line:
            continue
        space = line.find(' ')
        if space == 0:
            # Line starts with a space: a continuation, not an address.
            continue
        if space < 0:
            addrs[line] = 1
        else:
            addrs[line[:space]] = 1
    return addrs.keys()
|
|
|
@ -1,31 +0,0 @@
|
||||||
# Copyright (C) 2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""LLNL's custom Sendmail bounce message."""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import email
|
|
||||||
|
|
||||||
acre = re.compile(r',\s*(?P<addr>\S+@[^,]+),', re.IGNORECASE)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Return the first address found in an LLNL Sendmail bounce body."""
    # The address is comma-delimited; only the first hit matters.
    for body_line in email.Iterators.body_line_iterator(msg):
        match = acre.search(body_line)
        if match is not None:
            return [match.group('addr')]
    return []
|
|
|
@ -1,53 +0,0 @@
|
||||||
# Copyright (C) 1998-2003 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Microsoft's `SMTPSVC' nears I kin tell."""
|
|
||||||
|
|
||||||
import re
|
|
||||||
from cStringIO import StringIO
|
|
||||||
from types import ListType
|
|
||||||
|
|
||||||
scre = re.compile(r'transcript of session follows', re.IGNORECASE)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Scan a Microsoft SMTPSVC bounce for addresses.

    Returns None when the message doesn't match this format,
    otherwise a (possibly empty) list of lines containing `@'.
    """
    if msg.get_content_type() != 'multipart/mixed':
        return None
    # The session transcript lives in the first subpart (no MIME type).
    try:
        subpart = msg.get_payload(0)
    except IndexError:
        # The message *looked* like a multipart but wasn't.
        return None
    data = subpart.get_payload()
    if isinstance(data, ListType):
        # A nested multipart, so not a matching bounce.
        return None
    addrs = []
    in_transcript = False
    for line in StringIO(data):
        if not in_transcript and scre.search(line):
            in_transcript = True
        # The marker line itself is also scanned for an address,
        # matching the original state-machine behavior.
        if in_transcript and '@' in line:
            addrs.append(line)
    return addrs
|
|
|
@ -1,88 +0,0 @@
|
||||||
# Copyright (C) 1998,1999,2000,2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Netscape Messaging Server bounce formats.
|
|
||||||
|
|
||||||
I've seen at least one NMS server version 3.6 (envy.gmp.usyd.edu.au) bounce
|
|
||||||
messages of this format. Bounces come in DSN MIME format, but don't include
|
|
||||||
any -Recipient: headers. Gotta just parse the text :(
|
|
||||||
|
|
||||||
NMS 4.1 (dfw-smtpin1.email.verio.net) seems even worse, but we'll try to
|
|
||||||
decipher the format here too.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
from cStringIO import StringIO
|
|
||||||
|
|
||||||
pcre = re.compile(
|
|
||||||
r'This Message was undeliverable due to the following reason:',
|
|
||||||
re.IGNORECASE)
|
|
||||||
|
|
||||||
acre = re.compile(
|
|
||||||
r'(?P<reply>please reply to)?.*<(?P<addr>[^>]*)>',
|
|
||||||
re.IGNORECASE)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def flatten(msg, leaves):
    """Append every leaf (non-multipart) subpart of `msg' to `leaves'."""
    if not msg.is_multipart():
        leaves.append(msg)
        return
    for subpart in msg.get_payload():
        flatten(subpart, leaves)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Extract addresses from Netscape Messaging Server bounces."""
    # Sigh.  Some NMS 3.6's show multipart/report;
    # report-type=delivery-status and some show multipart/mixed.
    if not msg.is_multipart():
        return None
    # We want a text/plain leaf occurring before the last subpart
    # (i.e. before the delivery-status / original-message part).
    leaves = []
    flatten(msg, leaves)
    plainmsg = None
    for subpart in leaves[:-1]:
        if subpart.get_content_type() == 'text/plain':
            plainmsg = subpart
            break
    if not plainmsg:
        return None
    # Total guesswork, based on captured examples...
    body = StringIO(plainmsg.get_payload())
    addrs = []
    for line in body:
        if pcre.search(line):
            # Bounce section found, but the format inside is unknown;
            # scoop up every <addr> that isn't a "please reply to" line.
            for inner in body:
                mo = acre.search(inner)
                if mo and not mo.group('reply'):
                    addrs.append(mo.group('addr'))
    return addrs
|
|
|
@ -1,85 +0,0 @@
|
||||||
# Copyright (C) 1998-2003 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Parse bounce messages generated by Postfix.
|
|
||||||
|
|
||||||
This also matches something called `Keftamail' which looks just like Postfix
|
|
||||||
bounces with the word Postfix scratched out and the word `Keftamail' written
|
|
||||||
in in crayon.
|
|
||||||
|
|
||||||
It also matches something claiming to be `The BNS Postfix program', and
|
|
||||||
`SMTP_Gateway'. Everybody's gotta be different, huh?
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
from cStringIO import StringIO
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def flatten(msg, leaves):
    """Recursively collect all non-multipart subparts of `msg' into `leaves'."""
    if not msg.is_multipart():
        leaves.append(msg)
        return
    for child in msg.get_payload():
        flatten(child, leaves)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# are these heuristics correct or guaranteed?
|
|
||||||
pcre = re.compile(r'[ \t]*the\s*(bns)?\s*(postfix|keftamail|smtp_gateway)',
|
|
||||||
re.IGNORECASE)
|
|
||||||
rcre = re.compile(r'failure reason:$', re.IGNORECASE)
|
|
||||||
acre = re.compile(r'<(?P<addr>[^>]*)>:')
|
|
||||||
|
|
||||||
def findaddr(msg):
    """Pull <addr>: lines out of a Postfix notification body."""
    addrs = []
    seen_salutation = False
    for raw in StringIO(msg.get_payload()):
        # Preserve leading whitespace; only strip the line ending.
        line = raw.rstrip()
        if not seen_salutation:
            # match() deliberately anchors at the start of the line.
            if pcre.match(line) or rcre.match(line):
                seen_salutation = True
        elif line:
            mo = acre.search(line)
            if mo:
                addrs.append(mo.group('addr'))
            # else: probably a continuation line
    return addrs
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Locate the Postfix `notification' text part and parse it."""
    if msg.get_content_type() not in ('multipart/mixed', 'multipart/report'):
        return None
    # The failure report is the text/plain subpart whose
    # Content-Description: header is `notification'.
    leaves = []
    flatten(msg, leaves)
    for part in leaves:
        if part.get_content_type() != 'text/plain':
            continue
        if part.get('content-description', '').lower() == 'notification':
            return findaddr(part)
    return None
|
|
|
@ -1,70 +0,0 @@
|
||||||
# Copyright (C) 1998-2006 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
|
|
||||||
# USA.
|
|
||||||
|
|
||||||
"""Parse bounce messages generated by qmail.
|
|
||||||
|
|
||||||
Qmail actually has a standard, called QSBMF (qmail-send bounce message
|
|
||||||
format), as described in
|
|
||||||
|
|
||||||
http://cr.yp.to/proto/qsbmf.txt
|
|
||||||
|
|
||||||
This module should be conformant.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import email.Iterators
|
|
||||||
|
|
||||||
# Other (non-standard?) intros have been observed in the wild.
|
|
||||||
introtags = [
|
|
||||||
'Hi. This is the',
|
|
||||||
"We're sorry. There's a problem",
|
|
||||||
'Check your send e-mail address.'
|
|
||||||
]
|
|
||||||
acre = re.compile(r'<(?P<addr>[^>]*)>:')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Parse a QSBMF (qmail-send bounce message format) bounce."""
    # Paragraph-oriented state machine:
    #   SEEKING_INTRO -> IN_INTRO -> IN_RECIPS
    SEEKING_INTRO, IN_INTRO, IN_RECIPS = range(3)
    state = SEEKING_INTRO
    addrs = []
    for raw in email.Iterators.body_line_iterator(msg):
        line = raw.strip()
        if state == SEEKING_INTRO:
            for tag in introtags:
                if line.startswith(tag):
                    state = IN_INTRO
                    break
        elif state == IN_INTRO:
            if not line:
                # A blank line ends the intro paragraph.
                state = IN_RECIPS
        elif state == IN_RECIPS:
            if line.startswith('-'):
                # The break paragraph, so we're done.
                break
            # We must be looking at a recipient paragraph.
            mo = acre.match(line)
            if mo:
                addrs.append(mo.group('addr'))
            # Otherwise it's a continuation line; just ignore it.
    return addrs
|
|
|
@ -1,59 +0,0 @@
|
||||||
# Copyright (C) 1998-2006 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
|
|
||||||
# USA.
|
|
||||||
|
|
||||||
"""Something which claims
|
|
||||||
X-Mailer: <SMTP32 vXXXXXX>
|
|
||||||
|
|
||||||
What the heck is this thing? Here's a recent host:
|
|
||||||
|
|
||||||
% telnet 207.51.255.218 smtp
|
|
||||||
Trying 207.51.255.218...
|
|
||||||
Connected to 207.51.255.218.
|
|
||||||
Escape character is '^]'.
|
|
||||||
220 X1 NT-ESMTP Server 208.24.118.205 (IMail 6.00 45595-15)
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import email
|
|
||||||
|
|
||||||
ecre = re.compile('original message follows', re.IGNORECASE)
|
|
||||||
acre = re.compile(r'''
|
|
||||||
( # several different prefixes
|
|
||||||
user\ mailbox[^:]*: # have been spotted in the
|
|
||||||
|delivery\ failed[^:]*: # wild...
|
|
||||||
|unknown\ user[^:]*:
|
|
||||||
|undeliverable\ +to
|
|
||||||
)
|
|
||||||
\s* # space separator
|
|
||||||
(?P<addr>.*) # and finally, the address
|
|
||||||
''', re.IGNORECASE | re.VERBOSE)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
    """Handle bounces from the IMail `SMTP32' mailer."""
    # Only messages stamped with the SMTP32 X-Mailer are ours.
    if not msg.get('x-mailer', '').startswith('<SMTP32 v'):
        return
    found = {}
    for line in email.Iterators.body_line_iterator(msg):
        # Stop once the quoted original message begins.
        if ecre.search(line):
            break
        match = acre.search(line)
        if match:
            found[match.group('addr')] = 1
    return found.keys()
|
|
|
@ -1,165 +0,0 @@
|
||||||
# Copyright (C) 1998-2006 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
|
|
||||||
# USA.
|
|
||||||
|
|
||||||
"""Recognizes simple heuristically delimited bounces."""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import email.Iterators
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def _c(pattern):
|
|
||||||
return re.compile(pattern, re.IGNORECASE)
|
|
||||||
|
|
||||||
# This is a list of tuples of the form
|
|
||||||
#
|
|
||||||
# (start cre, end cre, address cre)
|
|
||||||
#
|
|
||||||
# where `cre' means compiled regular expression, start is the line just before
|
|
||||||
# the bouncing address block, end is the line just after the bouncing address
|
|
||||||
# block, and address cre is the regexp that will recognize the addresses. It
|
|
||||||
# must have a group called `addr' which will contain exactly and only the
|
|
||||||
# address that bounced.
|
|
||||||
# All pattern literals are raw strings so regex escapes such as \s and
# \( are not interpreted as (invalid) Python string escapes, which emit
# warnings on modern interpreters.  The compiled regexes are unchanged.
PATTERNS = [
    # sdm.de
    (_c(r'here is your list of failed recipients'),
     _c(r'here is your returned mail'),
     _c(r'<(?P<addr>[^>]*)>')),
    # sz-sb.de, corridor.com, nfg.nl
    (_c(r'the following addresses had'),
     _c(r'transcript of session follows'),
     _c(r'<(?P<fulladdr>[^>]*)>|\(expanded from: <?(?P<addr>[^>)]*)>?\)')),
    # robanal.demon.co.uk
    (_c(r'this message was created automatically by mail delivery software'),
     _c(r'original message follows'),
     _c(r'rcpt to:\s*<(?P<addr>[^>]*)>')),
    # s1.com (InterScan E-Mail VirusWall NT ???)
    (_c(r'message from interscan e-mail viruswall nt'),
     _c(r'end of message'),
     _c(r'rcpt to:\s*<(?P<addr>[^>]*)>')),
    # Smail
    (_c(r'failed addresses follow:'),
     _c(r'message text follows:'),
     _c(r'\s*(?P<addr>\S+@\S+)')),
    # newmail.ru
    (_c(r'This is the machine generated message from mail service.'),
     _c(r'--- Below the next line is a copy of the message.'),
     _c(r'<(?P<addr>[^>]*)>')),
    # turbosport.com runs something called `MDaemon 3.5.2' ???
    (_c(r'The following addresses did NOT receive a copy of your message:'),
     _c(r'--- Session Transcript ---'),
     _c(r'[>]\s*(?P<addr>.*)$')),
    # usa.net
    (_c(r'Intended recipient:\s*(?P<addr>.*)$'),
     _c(r'--------RETURNED MAIL FOLLOWS--------'),
     _c(r'Intended recipient:\s*(?P<addr>.*)$')),
    # hotpop.com
    (_c(r'Undeliverable Address:\s*(?P<addr>.*)$'),
     _c(r'Original message attached'),
     _c(r'Undeliverable Address:\s*(?P<addr>.*)$')),
    # Another demon.co.uk format
    (_c(r'This message was created automatically by mail delivery'),
     _c(r'^---- START OF RETURNED MESSAGE ----'),
     _c(r"addressed to '(?P<addr>[^']*)'")),
    # Prodigy.net full mailbox
    (_c(r"User's mailbox is full:"),
     _c(r'Unable to deliver mail.'),
     _c(r"User's mailbox is full:\s*<(?P<addr>[^>]*)>")),
    # Microsoft SMTPSVC
    (_c(r'The email below could not be delivered to the following user:'),
     _c(r'Old message:'),
     _c(r'<(?P<addr>[^>]*)>')),
    # Yahoo on behalf of other domains like sbcglobal.net
    (_c(r'Unable to deliver message to the following address\(es\)\.'),
     _c(r'--- Original message follows\.'),
     _c(r'<(?P<addr>[^>]*)>:')),
    # kundenserver.de
    (_c(r'A message that you sent could not be delivered'),
     _c(r'^---'),
     _c(r'<(?P<addr>[^>]*)>')),
    # another kundenserver.de
    (_c(r'A message that you sent could not be delivered'),
     _c(r'^---'),
     _c(r'^(?P<addr>[^\s@]+@[^\s@:]+):')),
    # thehartford.com
    (_c(r'Delivery to the following recipients failed'),
     _c(r"Bogus - there actually isn't anything"),
     _c(r'^\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
    # and another thehartfod.com/hartfordlife.com
    (_c(r'^Your message\s*$'),
     _c(r'^because:'),
     _c(r'^\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
    # kviv.be (NTMail)
    (_c(r'^Unable to deliver message to'),
     _c(r'\*+\s+End of message\s+\*+'),
     _c(r'<(?P<addr>[^>]*)>')),
    # earthlink.net supported domains
    (_c(r'^Sorry, unable to deliver your message to'),
     _c(r'^A copy of the original message'),
     _c(r'\s*(?P<addr>[^\s@]+@[^\s@]+)\s+')),
    # ademe.fr
    (_c(r'^A message could not be delivered to:'),
     _c(r'^Subject:'),
     _c(r'^\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
    # andrew.ac.jp
    (_c(r'^Invalid final delivery userid:'),
     _c(r'^Original message follows.'),
     _c(r'\s*(?P<addr>[^\s@]+@[^\s@]+)\s*$')),
    # E500_SMTP_Mail_Service@lerctr.org
    (_c(r'------ Failed Recipients ------'),
     _c(r'-------- Returned Mail --------'),
     _c(r'<(?P<addr>[^>]*)>')),
    # cynergycom.net
    (_c(r'A message that you sent could not be delivered'),
     _c(r'^---'),
     _c(r'(?P<addr>[^\s@]+@[^\s@)]+)')),
    # Next one goes here...
    ]
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg, patterns=None):
|
|
||||||
if patterns is None:
|
|
||||||
patterns = PATTERNS
|
|
||||||
# simple state machine
|
|
||||||
# 0 = nothing seen yet
|
|
||||||
# 1 = intro seen
|
|
||||||
addrs = {}
|
|
||||||
# MAS: This is a mess. The outer loop used to be over the message
|
|
||||||
# so we only looped through the message once. Looping through the
|
|
||||||
# message for each set of patterns is obviously way more work, but
|
|
||||||
# if we don't do it, problems arise because scre from the wrong
|
|
||||||
# pattern set matches first and then acre doesn't match. The
|
|
||||||
# alternative is to split things into separate modules, but then
|
|
||||||
# we process the message multiple times anyway.
|
|
||||||
for scre, ecre, acre in patterns:
|
|
||||||
state = 0
|
|
||||||
for line in email.Iterators.body_line_iterator(msg):
|
|
||||||
if state == 0:
|
|
||||||
if scre.search(line):
|
|
||||||
state = 1
|
|
||||||
if state == 1:
|
|
||||||
mo = acre.search(line)
|
|
||||||
if mo:
|
|
||||||
addr = mo.group('addr')
|
|
||||||
if addr:
|
|
||||||
addrs[mo.group('addr')] = 1
|
|
||||||
elif ecre.search(line):
|
|
||||||
break
|
|
||||||
if addrs:
|
|
||||||
break
|
|
||||||
return addrs.keys()
|
|
|
@ -1,50 +0,0 @@
|
||||||
# Copyright (C) 2001-2006 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
|
|
||||||
# USA.
|
|
||||||
|
|
||||||
"""Recognizes simple heuristically delimited warnings."""
|
|
||||||
|
|
||||||
from BouncerAPI import Stop
|
|
||||||
from SimpleMatch import _c
|
|
||||||
from SimpleMatch import process as _process
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# This is a list of tuples of the form
|
|
||||||
#
|
|
||||||
# (start cre, end cre, address cre)
|
|
||||||
#
|
|
||||||
# where `cre' means compiled regular expression, start is the line just before
|
|
||||||
# the bouncing address block, end is the line just after the bouncing address
|
|
||||||
# block, and address cre is the regexp that will recognize the addresses. It
|
|
||||||
# must have a group called `addr' which will contain exactly and only the
|
|
||||||
# address that bounced.
|
|
||||||
patterns = [
|
|
||||||
# pop3.pta.lia.net
|
|
||||||
(_c('The address to which the message has not yet been delivered is'),
|
|
||||||
_c('No action is required on your part'),
|
|
||||||
_c(r'\s*(?P<addr>\S+@\S+)\s*')),
|
|
||||||
# Next one goes here...
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
|
|
||||||
if _process(msg, patterns):
|
|
||||||
# It's a recognized warning so stop now
|
|
||||||
return Stop
|
|
||||||
else:
|
|
||||||
return []
|
|
|
@ -1,53 +0,0 @@
|
||||||
# Copyright (C) 1998,1999,2000,2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Yahoo! has its own weird format for bounces."""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import email
|
|
||||||
from email.Utils import parseaddr
|
|
||||||
|
|
||||||
tcre = re.compile(r'message\s+from\s+yahoo\.\S+', re.IGNORECASE)
|
|
||||||
acre = re.compile(r'<(?P<addr>[^>]*)>:')
|
|
||||||
ecre = re.compile(r'--- Original message follows')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
|
|
||||||
# Yahoo! bounces seem to have a known subject value and something called
|
|
||||||
# an x-uidl: header, the value of which seems unimportant.
|
|
||||||
sender = parseaddr(msg.get('from', '').lower())[1] or ''
|
|
||||||
if not sender.startswith('mailer-daemon@yahoo'):
|
|
||||||
return None
|
|
||||||
addrs = []
|
|
||||||
# simple state machine
|
|
||||||
# 0 == nothing seen
|
|
||||||
# 1 == tag line seen
|
|
||||||
state = 0
|
|
||||||
for line in email.Iterators.body_line_iterator(msg):
|
|
||||||
line = line.strip()
|
|
||||||
if state == 0 and tcre.match(line):
|
|
||||||
state = 1
|
|
||||||
elif state == 1:
|
|
||||||
mo = acre.match(line)
|
|
||||||
if mo:
|
|
||||||
addrs.append(mo.group('addr'))
|
|
||||||
continue
|
|
||||||
mo = ecre.match(line)
|
|
||||||
if mo:
|
|
||||||
# we're at the end of the error response
|
|
||||||
break
|
|
||||||
return addrs
|
|
|
@ -1,79 +0,0 @@
|
||||||
# Copyright (C) 2000,2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
|
|
||||||
"""Yale's mail server is pretty dumb.
|
|
||||||
|
|
||||||
Its reports include the end user's name, but not the full domain. I think we
|
|
||||||
can usually guess it right anyway. This is completely based on examination of
|
|
||||||
the corpse, and is subject to failure whenever Yale even slightly changes
|
|
||||||
their MTA. :(
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
from cStringIO import StringIO
|
|
||||||
from email.Utils import getaddresses
|
|
||||||
|
|
||||||
scre = re.compile(r'Message not delivered to the following', re.IGNORECASE)
|
|
||||||
ecre = re.compile(r'Error Detail', re.IGNORECASE)
|
|
||||||
acre = re.compile(r'\s+(?P<addr>\S+)\s+')
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def process(msg):
|
|
||||||
if msg.is_multipart():
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
whofrom = getaddresses([msg.get('from', '')])[0][1]
|
|
||||||
if not whofrom:
|
|
||||||
return None
|
|
||||||
username, domain = whofrom.split('@', 1)
|
|
||||||
except (IndexError, ValueError):
|
|
||||||
return None
|
|
||||||
if username.lower() <> 'mailer-daemon':
|
|
||||||
return None
|
|
||||||
parts = domain.split('.')
|
|
||||||
parts.reverse()
|
|
||||||
for part1, part2 in zip(parts, ('edu', 'yale')):
|
|
||||||
if part1 <> part2:
|
|
||||||
return None
|
|
||||||
# Okay, we've established that the bounce came from the mailer-daemon at
|
|
||||||
# yale.edu. Let's look for a name, and then guess the relevant domains.
|
|
||||||
names = {}
|
|
||||||
body = StringIO(msg.get_payload())
|
|
||||||
state = 0
|
|
||||||
# simple state machine
|
|
||||||
# 0 == init
|
|
||||||
# 1 == intro found
|
|
||||||
while 1:
|
|
||||||
line = body.readline()
|
|
||||||
if not line:
|
|
||||||
break
|
|
||||||
if state == 0 and scre.search(line):
|
|
||||||
state = 1
|
|
||||||
elif state == 1 and ecre.search(line):
|
|
||||||
break
|
|
||||||
elif state == 1:
|
|
||||||
mo = acre.search(line)
|
|
||||||
if mo:
|
|
||||||
names[mo.group('addr')] = 1
|
|
||||||
# Now we have a bunch of names, these are either @yale.edu or
|
|
||||||
# @cs.yale.edu. Add them both.
|
|
||||||
addrs = []
|
|
||||||
for name in names.keys():
|
|
||||||
addrs.append(name + '@yale.edu')
|
|
||||||
addrs.append(name + '@cs.yale.edu')
|
|
||||||
return addrs
|
|
|
@ -1,15 +0,0 @@
|
||||||
# Copyright (C) 1998,1999,2000,2001,2002 by the Free Software Foundation, Inc.
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software
|
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
|
@ -14,17 +14,22 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import ConfigParser
|
from __future__ import print_function
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
import urllib2
|
|
||||||
import urlparse
|
|
||||||
import hashlib
|
import hashlib
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_bytes, force_text
|
||||||
|
from django.utils.six.moves import configparser as ConfigParser
|
||||||
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
|
|
||||||
from quixote import cleanup
|
from quixote import cleanup
|
||||||
|
from wcs.qommon import force_str
|
||||||
from ..qommon import misc
|
from ..qommon import misc
|
||||||
from ..qommon.ctl import Command, make_option
|
from ..qommon.ctl import Command, make_option
|
||||||
from ..qommon.storage import atomic_write
|
from ..qommon.storage import atomic_write
|
||||||
|
@ -100,7 +105,7 @@ class CmdCheckHobos(Command):
|
||||||
# get environment definition from stdin
|
# get environment definition from stdin
|
||||||
self.all_services = json.load(sys.stdin)
|
self.all_services = json.load(sys.stdin)
|
||||||
else:
|
else:
|
||||||
self.all_services = json.load(file(args[1]))
|
self.all_services = json.load(open(args[1]))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
service = [x for x in self.all_services.get('services', []) if \
|
service = [x for x in self.all_services.get('services', []) if \
|
||||||
|
@ -116,7 +121,7 @@ class CmdCheckHobos(Command):
|
||||||
pub.app_dir = os.path.join(global_app_dir,
|
pub.app_dir = os.path.join(global_app_dir,
|
||||||
self.get_instance_path(service))
|
self.get_instance_path(service))
|
||||||
if not os.path.exists(pub.app_dir):
|
if not os.path.exists(pub.app_dir):
|
||||||
print 'initializing instance in', pub.app_dir
|
print('initializing instance in', pub.app_dir)
|
||||||
os.mkdir(pub.app_dir)
|
os.mkdir(pub.app_dir)
|
||||||
pub.initialize_app_dir()
|
pub.initialize_app_dir()
|
||||||
|
|
||||||
|
@ -124,17 +129,17 @@ class CmdCheckHobos(Command):
|
||||||
skeleton_filepath = os.path.join(global_app_dir, 'skeletons',
|
skeleton_filepath = os.path.join(global_app_dir, 'skeletons',
|
||||||
service.get('template_name'))
|
service.get('template_name'))
|
||||||
if os.path.exists(skeleton_filepath):
|
if os.path.exists(skeleton_filepath):
|
||||||
pub.import_zip(file(skeleton_filepath))
|
pub.import_zip(open(skeleton_filepath, 'rb'))
|
||||||
new_site = True
|
new_site = True
|
||||||
else:
|
else:
|
||||||
print 'updating instance in', pub.app_dir
|
print('updating instance in', pub.app_dir)
|
||||||
new_site = False
|
new_site = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.configure_site_options(service, pub,
|
self.configure_site_options(service, pub,
|
||||||
ignore_timestamp=sub_options.ignore_timestamp)
|
ignore_timestamp=sub_options.ignore_timestamp)
|
||||||
except NoChange:
|
except NoChange:
|
||||||
print ' skipping'
|
print(' skipping')
|
||||||
return
|
return
|
||||||
|
|
||||||
pub.set_config(skip_sql=True)
|
pub.set_config(skip_sql=True)
|
||||||
|
@ -145,13 +150,13 @@ class CmdCheckHobos(Command):
|
||||||
|
|
||||||
self.update_profile(self.all_services.get('profile', {}), pub)
|
self.update_profile(self.all_services.get('profile', {}), pub)
|
||||||
# Store hobo.json
|
# Store hobo.json
|
||||||
atomic_write(os.path.join(pub.app_dir, 'hobo.json'), json.dumps(self.all_services))
|
atomic_write(os.path.join(pub.app_dir, 'hobo.json'), force_bytes(json.dumps(self.all_services)))
|
||||||
|
|
||||||
def update_configuration(self, service, pub):
|
def update_configuration(self, service, pub):
|
||||||
if not pub.cfg.get('misc'):
|
if not pub.cfg.get('misc'):
|
||||||
pub.cfg['misc'] = {'charset': 'utf-8'}
|
pub.cfg['misc'] = {'charset': 'utf-8'}
|
||||||
pub.cfg['misc']['sitename'] = service.get('title').encode('utf-8')
|
pub.cfg['misc']['sitename'] = force_str(service.get('title'))
|
||||||
pub.cfg['misc']['frontoffice-url'] = service.get('base_url').encode('utf-8')
|
pub.cfg['misc']['frontoffice-url'] = force_str(service.get('base_url'))
|
||||||
if not pub.cfg.get('language'):
|
if not pub.cfg.get('language'):
|
||||||
pub.cfg['language'] = {'language': 'fr'}
|
pub.cfg['language'] = {'language': 'fr'}
|
||||||
|
|
||||||
|
@ -205,9 +210,9 @@ class CmdCheckHobos(Command):
|
||||||
component_dir)
|
component_dir)
|
||||||
|
|
||||||
if variables.get('default_from_email'):
|
if variables.get('default_from_email'):
|
||||||
pub.cfg['emails']['from'] = variables.get('default_from_email').encode('utf-8')
|
pub.cfg['emails']['from'] = force_str(variables.get('default_from_email'))
|
||||||
if variables.get('email_signature') is not None:
|
if variables.get('email_signature') is not None:
|
||||||
pub.cfg['emails']['footer'] = variables.get('email_signature').encode('utf-8')
|
pub.cfg['emails']['footer'] = force_str(variables.get('email_signature'))
|
||||||
|
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
|
|
||||||
|
@ -239,7 +244,7 @@ class CmdCheckHobos(Command):
|
||||||
field_class = EmailField
|
field_class = EmailField
|
||||||
elif attribute['kind'] in ('date', 'birthdate', 'fedict_date'):
|
elif attribute['kind'] in ('date', 'birthdate', 'fedict_date'):
|
||||||
field_class = DateField
|
field_class = DateField
|
||||||
new_field = field_class(label=attribute['label'].encode('utf-8'),
|
new_field = field_class(label=force_str(attribute['label']),
|
||||||
type=field_class.key,
|
type=field_class.key,
|
||||||
varname=attribute['name'])
|
varname=attribute['name'])
|
||||||
new_field.id = field_id
|
new_field.id = field_id
|
||||||
|
@ -248,8 +253,8 @@ class CmdCheckHobos(Command):
|
||||||
# remove it for the moment
|
# remove it for the moment
|
||||||
formdef.fields.remove(profile_fields[field_id])
|
formdef.fields.remove(profile_fields[field_id])
|
||||||
|
|
||||||
profile_fields[field_id].label = attribute['label'].encode('utf-8')
|
profile_fields[field_id].label = force_str(attribute['label'])
|
||||||
profile_fields[field_id].hint = attribute['description'].encode('utf-8')
|
profile_fields[field_id].hint = force_str(attribute['description'])
|
||||||
profile_fields[field_id].required = attribute['required']
|
profile_fields[field_id].required = attribute['required']
|
||||||
|
|
||||||
if attribute['disabled']:
|
if attribute['disabled']:
|
||||||
|
@ -322,12 +327,12 @@ class CmdCheckHobos(Command):
|
||||||
try:
|
try:
|
||||||
rfd = misc.urlopen(metadata_url)
|
rfd = misc.urlopen(metadata_url)
|
||||||
except misc.ConnectionError as e:
|
except misc.ConnectionError as e:
|
||||||
print >> sys.stderr, 'failed to get metadata URL', metadata_url, e
|
print('failed to get metadata URL', metadata_url, e, file=sys.stderr)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
s = rfd.read()
|
s = rfd.read()
|
||||||
(bfd, metadata_pathname) = tempfile.mkstemp('.metadata')
|
(bfd, metadata_pathname) = tempfile.mkstemp('.metadata')
|
||||||
atomic_write(metadata_pathname, s)
|
atomic_write(metadata_pathname, force_bytes(s))
|
||||||
|
|
||||||
from ..qommon.ident.idp import AdminIDPDir
|
from ..qommon.ident.idp import AdminIDPDir
|
||||||
admin_dir = AdminIDPDir()
|
admin_dir = AdminIDPDir()
|
||||||
|
@ -337,7 +342,7 @@ class CmdCheckHobos(Command):
|
||||||
if not admin_attribute:
|
if not admin_attribute:
|
||||||
admin_attribute = 'is_superuser=true'
|
admin_attribute = 'is_superuser=true'
|
||||||
else:
|
else:
|
||||||
admin_attribute = unicode(admin_attribute).encode('utf-8')
|
admin_attribute = force_str(admin_attribute)
|
||||||
admin_attribute_dict = dict([admin_attribute.split('=')])
|
admin_attribute_dict = dict([admin_attribute.split('=')])
|
||||||
pub.cfg['idp'][key_provider_id]['admin-attributes'] = admin_attribute_dict
|
pub.cfg['idp'][key_provider_id]['admin-attributes'] = admin_attribute_dict
|
||||||
pub.cfg['idp'][key_provider_id]['nameidformat'] = 'unspecified'
|
pub.cfg['idp'][key_provider_id]['nameidformat'] = 'unspecified'
|
||||||
|
@ -346,7 +351,7 @@ class CmdCheckHobos(Command):
|
||||||
pub.write_cfg()
|
pub.write_cfg()
|
||||||
|
|
||||||
def get_instance_path(self, service):
|
def get_instance_path(self, service):
|
||||||
parsed_url = urllib2.urlparse.urlsplit(service.get('base_url'))
|
parsed_url = urlparse.urlsplit(service.get('base_url'))
|
||||||
instance_path = parsed_url.netloc
|
instance_path = parsed_url.netloc
|
||||||
if parsed_url.path:
|
if parsed_url.path:
|
||||||
instance_path += '+%s' % parsed_url.path.replace('/', '+')
|
instance_path += '+%s' % parsed_url.path.replace('/', '+')
|
||||||
|
@ -428,8 +433,8 @@ class CmdCheckHobos(Command):
|
||||||
if not 'variables' in config.sections():
|
if not 'variables' in config.sections():
|
||||||
config.add_section('variables')
|
config.add_section('variables')
|
||||||
for key, value in variables.items():
|
for key, value in variables.items():
|
||||||
key = unicode(key).encode('utf-8')
|
key = force_str(key)
|
||||||
value = unicode(value).encode('utf-8')
|
value = force_str(value)
|
||||||
config.set('variables', key, value)
|
config.set('variables', key, value)
|
||||||
|
|
||||||
if not 'api-secrets' in config.sections():
|
if not 'api-secrets' in config.sections():
|
||||||
|
@ -460,7 +465,7 @@ class CmdCheckHobos(Command):
|
||||||
portal_agent_url, portal_agent_url)
|
portal_agent_url, portal_agent_url)
|
||||||
config.set('options', 'backoffice_extra_head', extra_head)
|
config.set('options', 'backoffice_extra_head', extra_head)
|
||||||
|
|
||||||
with open(site_options_filepath, 'wb') as site_options:
|
with open(site_options_filepath, 'w') as site_options:
|
||||||
config.write(site_options)
|
config.write(site_options)
|
||||||
|
|
||||||
def normalize_database_name(self, database_name):
|
def normalize_database_name(self, database_name):
|
||||||
|
@ -499,14 +504,14 @@ class CmdCheckHobos(Command):
|
||||||
if not createdb_cfg:
|
if not createdb_cfg:
|
||||||
createdb_cfg = {}
|
createdb_cfg = {}
|
||||||
for k, v in pub.cfg['postgresql'].items():
|
for k, v in pub.cfg['postgresql'].items():
|
||||||
if v and isinstance(v, basestring):
|
if v and isinstance(v, six.string_types):
|
||||||
createdb_cfg[k] = v
|
createdb_cfg[k] = v
|
||||||
|
|
||||||
try:
|
try:
|
||||||
pgconn = psycopg2.connect(**createdb_cfg)
|
pgconn = psycopg2.connect(**createdb_cfg)
|
||||||
except psycopg2.Error as e:
|
except psycopg2.Error as e:
|
||||||
print >> sys.stderr, 'failed to connect to postgresql (%s)' % \
|
print('failed to connect to postgresql (%s)' % \
|
||||||
psycopg2.errorcodes.lookup(e.pgcode)
|
psycopg2.errorcodes.lookup(e.pgcode), file=sys.stderr)
|
||||||
return
|
return
|
||||||
|
|
||||||
pgconn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
|
pgconn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
|
||||||
|
@ -525,8 +530,8 @@ class CmdCheckHobos(Command):
|
||||||
if cur.fetchall():
|
if cur.fetchall():
|
||||||
new_database = False
|
new_database = False
|
||||||
else:
|
else:
|
||||||
print >> sys.stderr, 'failed to create database (%s)' % \
|
print('failed to create database (%s)' % \
|
||||||
psycopg2.errorcodes.lookup(e.pgcode)
|
psycopg2.errorcodes.lookup(e.pgcode), file=sys.stderr)
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
cur.close()
|
cur.close()
|
||||||
|
@ -543,8 +548,8 @@ class CmdCheckHobos(Command):
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def shared_secret(cls, secret1, secret2):
|
def shared_secret(cls, secret1, secret2):
|
||||||
secret1 = hashlib.sha256(secret1).hexdigest()
|
secret1 = hashlib.sha256(force_bytes(secret1)).hexdigest()
|
||||||
secret2 = hashlib.sha256(secret2).hexdigest()
|
secret2 = hashlib.sha256(force_bytes(secret2)).hexdigest()
|
||||||
return hex(int(secret1, 16) ^ int(secret2, 16))[2:-1]
|
return hex(int(secret1, 16) ^ int(secret2, 16))[2:-1]
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -14,6 +14,8 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import psycopg2
|
import psycopg2
|
||||||
|
@ -21,6 +23,8 @@ import psycopg2.errorcodes
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from shutil import rmtree
|
from shutil import rmtree
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
|
||||||
from ..qommon.ctl import Command, make_option
|
from ..qommon.ctl import Command, make_option
|
||||||
|
|
||||||
|
|
||||||
|
@ -56,7 +60,7 @@ class CmdDeleteTenant(Command):
|
||||||
if pub.is_using_postgresql():
|
if pub.is_using_postgresql():
|
||||||
postgresql_cfg = {}
|
postgresql_cfg = {}
|
||||||
for k, v in pub.cfg['postgresql'].items():
|
for k, v in pub.cfg['postgresql'].items():
|
||||||
if v and isinstance(v, basestring):
|
if v and isinstance(v, six.string_types):
|
||||||
postgresql_cfg[k] = v
|
postgresql_cfg[k] = v
|
||||||
|
|
||||||
# if there's a createdb-connection-params, we can do a DROP DATABASE with
|
# if there's a createdb-connection-params, we can do a DROP DATABASE with
|
||||||
|
@ -69,7 +73,8 @@ class CmdDeleteTenant(Command):
|
||||||
try:
|
try:
|
||||||
pgconn = psycopg2.connect(**createdb_cfg)
|
pgconn = psycopg2.connect(**createdb_cfg)
|
||||||
except psycopg2.Error as e:
|
except psycopg2.Error as e:
|
||||||
print >> sys.stderr, 'failed to connect to postgresql (%s)' % psycopg2.errorcodes.lookup(e.pgcode)
|
print('failed to connect to postgresql (%s)' % psycopg2.errorcodes.lookup(e.pgcode),
|
||||||
|
file=sys.stderr)
|
||||||
return
|
return
|
||||||
|
|
||||||
pgconn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
|
pgconn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
|
||||||
|
@ -103,8 +108,9 @@ class CmdDeleteTenant(Command):
|
||||||
(table_name, schema_name[:63]))
|
(table_name, schema_name[:63]))
|
||||||
|
|
||||||
except psycopg2.Error as e:
|
except psycopg2.Error as e:
|
||||||
print >> sys.stderr, 'failed to alter database %s: (%s)' % (createdb_cfg['database'],
|
print('failed to alter database %s: (%s)' % (
|
||||||
psycopg2.errorcodes.lookup(e.pgcode))
|
createdb_cfg['database'], psycopg2.errorcodes.lookup(e.pgcode)),
|
||||||
|
file=sys.stderr)
|
||||||
return
|
return
|
||||||
|
|
||||||
cur.close()
|
cur.close()
|
||||||
|
|
|
@ -14,6 +14,8 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from ..qommon.ctl import Command, make_option
|
from ..qommon.ctl import Command, make_option
|
||||||
|
@ -35,6 +37,6 @@ class CmdExportSettings(Command):
|
||||||
register_tld_names=False)
|
register_tld_names=False)
|
||||||
pub.app_dir = os.path.join(pub.app_dir, sub_options.vhost)
|
pub.app_dir = os.path.join(pub.app_dir, sub_options.vhost)
|
||||||
pub.reload_cfg()
|
pub.reload_cfg()
|
||||||
print pub.export_cfg()
|
print(pub.export_cfg())
|
||||||
|
|
||||||
CmdExportSettings.register()
|
CmdExportSettings.register()
|
||||||
|
|
|
@ -23,6 +23,7 @@ from wcs.roles import Role
|
||||||
from ..qommon.ctl import Command
|
from ..qommon.ctl import Command
|
||||||
from ..qommon.publisher import get_cfg
|
from ..qommon.publisher import get_cfg
|
||||||
from wcs.admin.settings import UserFieldsFormDef
|
from wcs.admin.settings import UserFieldsFormDef
|
||||||
|
from wcs.qommon import force_str
|
||||||
from wcs.qommon.misc import json_encode_helper
|
from wcs.qommon.misc import json_encode_helper
|
||||||
|
|
||||||
|
|
||||||
|
@ -107,17 +108,17 @@ class CmdHoboNotify(Command):
|
||||||
for o in data:
|
for o in data:
|
||||||
if 'uuid' not in o:
|
if 'uuid' not in o:
|
||||||
raise KeyError('role without uuid')
|
raise KeyError('role without uuid')
|
||||||
uuid = o['uuid'].encode(publisher.site_charset)
|
uuid = force_str(o['uuid'])
|
||||||
uuids.add(uuid)
|
uuids.add(uuid)
|
||||||
slug = None
|
slug = None
|
||||||
name = None
|
name = None
|
||||||
if action == 'provision':
|
if action == 'provision':
|
||||||
if not cls.check_valid_role(o):
|
if not cls.check_valid_role(o):
|
||||||
raise ValueError('invalid role')
|
raise ValueError('invalid role')
|
||||||
slug = o['slug'].encode(publisher.site_charset)
|
slug = force_str(o['slug'])
|
||||||
details = o.get('details', '').encode(publisher.site_charset) or None
|
details = force_str(o.get('details', '')) or None
|
||||||
name = o['name'].encode(publisher.site_charset)
|
name = force_str(o['name'])
|
||||||
emails = [email.encode(publisher.site_charset) for email in o['emails']]
|
emails = [force_str(email) for email in o['emails']]
|
||||||
emails_to_members = o['emails_to_members']
|
emails_to_members = o['emails_to_members']
|
||||||
# Find existing role
|
# Find existing role
|
||||||
role = Role.resolve(uuid, slug, name)
|
role = Role.resolve(uuid, slug, name)
|
||||||
|
@ -210,7 +211,7 @@ class CmdHoboNotify(Command):
|
||||||
users = User.get_users_with_name_identifier(uuid)
|
users = User.get_users_with_name_identifier(uuid)
|
||||||
for user in users:
|
for user in users:
|
||||||
user.remove_self()
|
user.remove_self()
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
publisher.notify_of_exception(sys.exc_info(), context='[PROVISIONNING]')
|
publisher.notify_of_exception(sys.exc_info(), context='[PROVISIONNING]')
|
||||||
|
|
||||||
CmdHoboNotify.register()
|
CmdHoboNotify.register()
|
||||||
|
|
|
@ -16,12 +16,13 @@
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import StringIO
|
|
||||||
import psycopg2
|
import psycopg2
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from django.utils.encoding import force_bytes
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
from django.core.management.base import CommandError
|
from django.core.management.base import CommandError
|
||||||
|
from django.utils.six import StringIO
|
||||||
|
|
||||||
from wcs.qommon.publisher import get_publisher_class
|
from wcs.qommon.publisher import get_publisher_class
|
||||||
|
|
||||||
|
@ -80,9 +81,9 @@ class Command(BaseCommand):
|
||||||
self.publisher.site_options.add_section('options')
|
self.publisher.site_options.add_section('options')
|
||||||
self.publisher.site_options.set('options', 'postgresql', 'true')
|
self.publisher.site_options.set('options', 'postgresql', 'true')
|
||||||
options_file = os.path.join(self.publisher.app_dir, 'site-options.cfg')
|
options_file = os.path.join(self.publisher.app_dir, 'site-options.cfg')
|
||||||
stringio = StringIO.StringIO()
|
stringio = StringIO()
|
||||||
self.publisher.site_options.write(stringio)
|
self.publisher.site_options.write(stringio)
|
||||||
atomic_write(options_file, stringio.getvalue())
|
atomic_write(options_file, force_bytes(stringio.getvalue()))
|
||||||
|
|
||||||
def store_users(self):
|
def store_users(self):
|
||||||
errors = []
|
errors = []
|
||||||
|
@ -156,4 +157,4 @@ class Command(BaseCommand):
|
||||||
|
|
||||||
def update_progress(self, progress, num_columns=120):
|
def update_progress(self, progress, num_columns=120):
|
||||||
sys.stdout.write('[%s] %s%%\r' % (
|
sys.stdout.write('[%s] %s%%\r' % (
|
||||||
('#'*((num_columns-10)*progress/100)).ljust(num_columns-15), progress))
|
('#'*int((num_columns-10)*progress/100)).ljust(num_columns-15), progress))
|
||||||
|
|
|
@ -48,5 +48,5 @@ class Command(TenantCommand):
|
||||||
if if_empty and not is_empty:
|
if if_empty and not is_empty:
|
||||||
return
|
return
|
||||||
|
|
||||||
publisher.import_zip(open(filename, 'r'))
|
publisher.import_zip(open(filename, 'rb'))
|
||||||
publisher.cleanup()
|
publisher.cleanup()
|
||||||
|
|
|
@ -21,6 +21,7 @@ import sys
|
||||||
|
|
||||||
from django.core.management.base import CommandError
|
from django.core.management.base import CommandError
|
||||||
|
|
||||||
|
from wcs.qommon import force_str
|
||||||
from wcs.qommon.publisher import get_publisher_class
|
from wcs.qommon.publisher import get_publisher_class
|
||||||
|
|
||||||
from . import TenantCommand
|
from . import TenantCommand
|
||||||
|
@ -54,4 +55,4 @@ class Command(TenantCommand):
|
||||||
for domain in domains:
|
for domain in domains:
|
||||||
sys.argv = args[:]
|
sys.argv = args[:]
|
||||||
self.init_tenant_publisher(domain, register_tld_names=False)
|
self.init_tenant_publisher(domain, register_tld_names=False)
|
||||||
runpy.run_module(module_name)
|
runpy.run_module(force_str(module_name))
|
||||||
|
|
|
@ -14,6 +14,8 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
@ -106,7 +108,7 @@ def get_formdata_accepting_trigger(formdef, trigger, status_ids=None):
|
||||||
|
|
||||||
def match_row(substitution_variables, row):
|
def match_row(substitution_variables, row):
|
||||||
select = row['select']
|
select = row['select']
|
||||||
for key, value in select.iteritems():
|
for key, value in select.items():
|
||||||
if str(substitution_variables.get(key)) != str(value):
|
if str(substitution_variables.get(key)) != str(value):
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
@ -116,7 +118,7 @@ def jump_and_perform(formdata, action, workflow_data=None):
|
||||||
get_publisher().substitutions.feed(get_publisher())
|
get_publisher().substitutions.feed(get_publisher())
|
||||||
get_publisher().substitutions.feed(formdata.formdef)
|
get_publisher().substitutions.feed(formdata.formdef)
|
||||||
get_publisher().substitutions.feed(formdata)
|
get_publisher().substitutions.feed(formdata)
|
||||||
print 'formdata %s jumps to status %s' % (formdata, action.status)
|
print('formdata %s jumps to status %s' % (formdata, action.status))
|
||||||
wcs_jump_and_perform(formdata, action, workflow_data=workflow_data)
|
wcs_jump_and_perform(formdata, action, workflow_data=workflow_data)
|
||||||
|
|
||||||
def select_and_jump_formdata(formdef, trigger, rows, status_ids=None):
|
def select_and_jump_formdata(formdef, trigger, rows, status_ids=None):
|
||||||
|
|
|
@ -1,99 +0,0 @@
|
||||||
# w.c.s. - web application for online forms
|
|
||||||
# Copyright (C) 2005-2010 Entr'ouvert
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation; either version 2 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
import os
|
|
||||||
import email.Parser
|
|
||||||
|
|
||||||
from Bouncers import BouncerAPI
|
|
||||||
|
|
||||||
from ..qommon.ctl import Command
|
|
||||||
|
|
||||||
COMMA_SPACE = ', '
|
|
||||||
|
|
||||||
class CmdProcessBounce(Command):
|
|
||||||
name = 'process_bounce'
|
|
||||||
|
|
||||||
def execute(self, base_options, sub_options, args):
|
|
||||||
from ..qommon.tokens import Token
|
|
||||||
from ..qommon.bounces import Bounce
|
|
||||||
|
|
||||||
from .. import publisher
|
|
||||||
|
|
||||||
try:
|
|
||||||
publisher.WcsPublisher.configure(self.config)
|
|
||||||
pub = publisher.WcsPublisher.create_publisher(
|
|
||||||
register_tld_names=False)
|
|
||||||
except:
|
|
||||||
# not much we can do if we don't have a publisher object :/
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
parser = email.Parser.Parser()
|
|
||||||
msg = parser.parse(sys.stdin)
|
|
||||||
addrs = self.get_bounce_addrs(msg)
|
|
||||||
if addrs is None:
|
|
||||||
# not a bounce
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
to = msg['To']
|
|
||||||
local_part, server_part = to.split('@')
|
|
||||||
token_id = local_part.split('+')[1]
|
|
||||||
except (IndexError, KeyError):
|
|
||||||
return
|
|
||||||
|
|
||||||
pub.app_dir = os.path.join(pub.app_dir, server_part)
|
|
||||||
if not os.path.exists(pub.app_dir):
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
token = Token.get(token_id)
|
|
||||||
except KeyError:
|
|
||||||
return
|
|
||||||
|
|
||||||
if token.type != 'email-bounce':
|
|
||||||
return
|
|
||||||
|
|
||||||
token.remove_self()
|
|
||||||
|
|
||||||
bounce = Bounce()
|
|
||||||
bounce.arrival_time = time.time()
|
|
||||||
bounce.bounce_message = msg.as_string()
|
|
||||||
bounce.addrs = addrs
|
|
||||||
bounce.original_message = token.email_message
|
|
||||||
bounce.original_rcpts = token.email_rcpts
|
|
||||||
bounce.email_type = token.email_type
|
|
||||||
bounce.store()
|
|
||||||
except:
|
|
||||||
pub.notify_of_exception(sys.exc_info(), context='[BOUNCE]')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_bounce_addrs(cls, msg):
|
|
||||||
bouncers_dir = os.path.join(os.path.dirname(__file__), 'Bouncers')
|
|
||||||
sys.path.append(bouncers_dir)
|
|
||||||
for modname in BouncerAPI.BOUNCE_PIPELINE:
|
|
||||||
__import__(modname)
|
|
||||||
addrs = sys.modules[modname].process(msg)
|
|
||||||
if addrs is BouncerAPI.Stop:
|
|
||||||
return None # Stop means to ignore message
|
|
||||||
if addrs:
|
|
||||||
return addrs
|
|
||||||
return None # didn't find any match
|
|
||||||
|
|
||||||
CmdProcessBounce.register()
|
|
|
@ -14,6 +14,8 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import tarfile
|
import tarfile
|
||||||
import os
|
import os
|
||||||
|
@ -41,12 +43,12 @@ class CmdRestore(Command):
|
||||||
hostname = args[0]
|
hostname = args[0]
|
||||||
pub.app_dir = os.path.join(pub.app_dir, hostname)
|
pub.app_dir = os.path.join(pub.app_dir, hostname)
|
||||||
if os.path.exists(pub.app_dir):
|
if os.path.exists(pub.app_dir):
|
||||||
print >> sys.stderr, 'cannot overwrite site'
|
print('cannot overwrite site', file=sys.stderr)
|
||||||
return 1
|
return 1
|
||||||
os.mkdir(pub.app_dir)
|
os.mkdir(pub.app_dir)
|
||||||
|
|
||||||
if not sub_options.filename:
|
if not sub_options.filename:
|
||||||
print >> sys.stderr, 'missing --file parameter'
|
print('missing --file parameter', file=sys.stderr)
|
||||||
return 1
|
return 1
|
||||||
backup_filepath = sub_options.filename
|
backup_filepath = sub_options.filename
|
||||||
|
|
||||||
|
|
|
@ -19,6 +19,8 @@
|
||||||
#
|
#
|
||||||
# [1]: file django/core/management/commands/shell.py
|
# [1]: file django/core/management/commands/shell.py
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
|
|
||||||
from ..qommon.ctl import Command, make_option
|
from ..qommon.ctl import Command, make_option
|
||||||
|
@ -40,7 +42,7 @@ class CmdShell(Command):
|
||||||
register_tld_names=False)
|
register_tld_names=False)
|
||||||
publisher.app_dir = os.path.join(publisher.APP_DIR, args[0])
|
publisher.app_dir = os.path.join(publisher.APP_DIR, args[0])
|
||||||
if not os.path.exists(publisher.app_dir):
|
if not os.path.exists(publisher.app_dir):
|
||||||
print 'Application directory %r does not exist.' % publisher.app_dir
|
print('Application directory %r does not exist.' % publisher.app_dir)
|
||||||
raise SystemExit(1)
|
raise SystemExit(1)
|
||||||
publisher.set_config()
|
publisher.set_config()
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -14,6 +14,8 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
@ -32,7 +34,7 @@ class CmdWipeData(Command):
|
||||||
|
|
||||||
def execute(self, base_options, sub_options, args):
|
def execute(self, base_options, sub_options, args):
|
||||||
if not sub_options.vhost:
|
if not sub_options.vhost:
|
||||||
print >> sys.stderr, 'you must specify --vhost'
|
print('you must specify --vhost', file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
from .. import publisher
|
from .. import publisher
|
||||||
|
|
|
@ -18,13 +18,15 @@ import collections
|
||||||
import hashlib
|
import hashlib
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_text, force_bytes
|
||||||
from django.utils.six.moves.urllib import parse as urllib
|
from django.utils.six.moves.urllib import parse as urllib
|
||||||
from django.utils.six.moves.urllib import parse as urlparse
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
|
|
||||||
from quixote import get_publisher, get_request, get_session
|
from quixote import get_publisher, get_request, get_session
|
||||||
from quixote.html import TemplateIO
|
from quixote.html import TemplateIO
|
||||||
|
|
||||||
from .qommon import _
|
from .qommon import _, force_str
|
||||||
from .qommon.form import *
|
from .qommon.form import *
|
||||||
from .qommon.humantime import seconds2humanduration
|
from .qommon.humantime import seconds2humanduration
|
||||||
from .qommon.misc import get_variadic_url
|
from .qommon.misc import get_variadic_url
|
||||||
|
@ -163,7 +165,7 @@ def get_structured_items(data_source, mode=None):
|
||||||
elif len(value[0]) == 1:
|
elif len(value[0]) == 1:
|
||||||
return [{'id': x[0], 'text': x[0]} for x in value]
|
return [{'id': x[0], 'text': x[0]} for x in value]
|
||||||
return value
|
return value
|
||||||
elif isinstance(value[0], basestring):
|
elif isinstance(value[0], six.string_types):
|
||||||
return [{'id': x, 'text': x} for x in value]
|
return [{'id': x, 'text': x} for x in value]
|
||||||
return value
|
return value
|
||||||
except:
|
except:
|
||||||
|
@ -187,7 +189,7 @@ def get_structured_items(data_source, mode=None):
|
||||||
return request.datasources_cache[url]
|
return request.datasources_cache[url]
|
||||||
|
|
||||||
if cache_duration:
|
if cache_duration:
|
||||||
cache_key = 'data-source-%s' % hashlib.md5(url).hexdigest()
|
cache_key = 'data-source-%s' % force_str(hashlib.md5(force_bytes(url)).hexdigest())
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
items = cache.get(cache_key)
|
items = cache.get(cache_key)
|
||||||
if items is not None:
|
if items is not None:
|
||||||
|
@ -317,12 +319,12 @@ class NamedDataSource(XmlStorableObject):
|
||||||
def export_data_source_to_xml(self, element, attribute_name, charset):
|
def export_data_source_to_xml(self, element, attribute_name, charset):
|
||||||
data_source = getattr(self, attribute_name)
|
data_source = getattr(self, attribute_name)
|
||||||
ET.SubElement(element, 'type').text = data_source.get('type')
|
ET.SubElement(element, 'type').text = data_source.get('type')
|
||||||
ET.SubElement(element, 'value').text = unicode(data_source.get('value') or '', charset)
|
ET.SubElement(element, 'value').text = force_text(data_source.get('value') or '', charset)
|
||||||
|
|
||||||
def import_data_source_from_xml(self, element, charset):
|
def import_data_source_from_xml(self, element, charset):
|
||||||
return {
|
return {
|
||||||
'type': str(element.find('type').text),
|
'type': force_str(element.find('type').text),
|
||||||
'value': (element.find('value').text or '').encode(charset)
|
'value': force_str(element.find('value').text or ''),
|
||||||
}
|
}
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|
|
@ -21,6 +21,7 @@ import datetime
|
||||||
import time
|
import time
|
||||||
import random
|
import random
|
||||||
import re
|
import re
|
||||||
|
import sys
|
||||||
import base64
|
import base64
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
import collections
|
import collections
|
||||||
|
@ -28,12 +29,13 @@ import collections
|
||||||
from quixote import get_request, get_publisher
|
from quixote import get_request, get_publisher
|
||||||
from quixote.html import htmltext, TemplateIO
|
from quixote.html import htmltext, TemplateIO
|
||||||
|
|
||||||
from django.utils.encoding import smart_text
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_bytes, force_text, smart_text
|
||||||
from django.utils.formats import date_format as django_date_format
|
from django.utils.formats import date_format as django_date_format
|
||||||
from django.utils.html import urlize
|
from django.utils.html import urlize
|
||||||
from django.utils.six.moves.html_parser import HTMLParser
|
from django.utils.six.moves.html_parser import HTMLParser
|
||||||
|
|
||||||
from .qommon import _
|
from .qommon import _, force_str
|
||||||
from .qommon import evalutils
|
from .qommon import evalutils
|
||||||
from .qommon.form import *
|
from .qommon.form import *
|
||||||
from .qommon.misc import localstrftime, strftime, date_format, ellipsize, can_thumbnail
|
from .qommon.misc import localstrftime, strftime, date_format, ellipsize, can_thumbnail
|
||||||
|
@ -192,9 +194,8 @@ class Field(object):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def unhtmled_label(self):
|
def unhtmled_label(self):
|
||||||
charset = get_publisher().site_charset
|
return force_str(HTMLParser().unescape(force_text(
|
||||||
return HTMLParser().unescape(unicode(
|
re.sub('<.*?>', ' ', self.label))).strip())
|
||||||
re.sub('<.*?>', ' ', self.label), charset)).strip().encode(charset)
|
|
||||||
|
|
||||||
def get_admin_attributes(self):
|
def get_admin_attributes(self):
|
||||||
return ['label', 'type', 'condition']
|
return ['label', 'type', 'condition']
|
||||||
|
@ -239,13 +240,13 @@ class Field(object):
|
||||||
continue
|
continue
|
||||||
el = ET.SubElement(field, attribute)
|
el = ET.SubElement(field, attribute)
|
||||||
if type(val) is dict:
|
if type(val) is dict:
|
||||||
for k, v in val.items():
|
for k, v in sorted(val.items()):
|
||||||
if isinstance(v, str):
|
if isinstance(v, str):
|
||||||
text_value = unicode(v, charset, 'replace')
|
text_value = force_text(v, charset, errors='replace')
|
||||||
else:
|
else:
|
||||||
# field having non str value in dictionnary field must overload
|
# field having non str value in dictionnary field must overload
|
||||||
# import_to_xml to handle import
|
# import_to_xml to handle import
|
||||||
text_value = unicode(v)
|
text_value = force_text(v)
|
||||||
ET.SubElement(el, k).text = text_value
|
ET.SubElement(el, k).text = text_value
|
||||||
elif type(val) is list:
|
elif type(val) is list:
|
||||||
if attribute[-1] == 's':
|
if attribute[-1] == 's':
|
||||||
|
@ -253,12 +254,9 @@ class Field(object):
|
||||||
else:
|
else:
|
||||||
atname = 'item'
|
atname = 'item'
|
||||||
for v in val:
|
for v in val:
|
||||||
ET.SubElement(el, atname).text = unicode(v, charset, 'replace')
|
ET.SubElement(el, atname).text = force_text(v, charset, errors='replace')
|
||||||
elif type(val) in (str, unicode):
|
elif isinstance(val, six.string_types):
|
||||||
if type(val) is unicode:
|
el.text = force_text(val, charset, errors='replace')
|
||||||
el.text = val
|
|
||||||
else:
|
|
||||||
el.text = unicode(val, charset, 'replace')
|
|
||||||
else:
|
else:
|
||||||
el.text = str(val)
|
el.text = str(val)
|
||||||
return field
|
return field
|
||||||
|
@ -274,11 +272,11 @@ class Field(object):
|
||||||
continue
|
continue
|
||||||
if list(el):
|
if list(el):
|
||||||
if type(getattr(self, attribute)) is list:
|
if type(getattr(self, attribute)) is list:
|
||||||
v = [x.text.encode(charset) for x in el]
|
v = [force_str(x.text) for x in el]
|
||||||
elif type(getattr(self, attribute)) is dict:
|
elif type(getattr(self, attribute)) is dict:
|
||||||
v = {}
|
v = {}
|
||||||
for e in el:
|
for e in el:
|
||||||
v[e.tag] = e.text.encode(charset)
|
v[e.tag] = force_str(e.text)
|
||||||
else:
|
else:
|
||||||
print('currently:', self.__dict__)
|
print('currently:', self.__dict__)
|
||||||
print(' attribute:', attribute)
|
print(' attribute:', attribute)
|
||||||
|
@ -291,12 +289,12 @@ class Field(object):
|
||||||
elif el.text in ('False', 'True'): # bools
|
elif el.text in ('False', 'True'): # bools
|
||||||
setattr(self, attribute, el.text == 'True')
|
setattr(self, attribute, el.text == 'True')
|
||||||
elif type(getattr(self, attribute)) is int:
|
elif type(getattr(self, attribute)) is int:
|
||||||
setattr(self, attribute, int(el.text.encode(charset)))
|
setattr(self, attribute, int(el.text))
|
||||||
else:
|
else:
|
||||||
setattr(self, attribute, el.text.encode(charset))
|
setattr(self, attribute, force_str(el.text))
|
||||||
if include_id:
|
if include_id:
|
||||||
try:
|
try:
|
||||||
self.id = elem.find('id').text.encode(charset)
|
self.id = force_str(elem.find('id').text)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -306,11 +304,11 @@ class Field(object):
|
||||||
return
|
return
|
||||||
if node.findall('type'):
|
if node.findall('type'):
|
||||||
self.condition = {
|
self.condition = {
|
||||||
'type': node.find('type').text.encode(charset),
|
'type': force_str(node.find('type').text),
|
||||||
'value': node.find('value').text.encode(charset),
|
'value': force_str(node.find('value').text),
|
||||||
}
|
}
|
||||||
elif node.text:
|
elif node.text:
|
||||||
self.condition = {'type': 'python', 'value': node.text.strip().encode(charset)}
|
self.condition = {'type': 'python', 'value': force_str(node.text).strip()}
|
||||||
|
|
||||||
def get_rst_view_value(self, value, indent=''):
|
def get_rst_view_value(self, value, indent=''):
|
||||||
return indent + self.get_view_value(value)
|
return indent + self.get_view_value(value)
|
||||||
|
@ -754,8 +752,8 @@ class StringField(WidgetField):
|
||||||
value = value or ''
|
value = value or ''
|
||||||
if value.startswith('http://') or value.startswith('https://'):
|
if value.startswith('http://') or value.startswith('https://'):
|
||||||
charset = get_publisher().site_charset
|
charset = get_publisher().site_charset
|
||||||
value = unicode(value, charset)
|
value = force_text(value, charset)
|
||||||
return htmltext(urlize(value, nofollow=True, autoescape=True).encode(charset))
|
return htmltext(force_str(urlize(value, nofollow=True, autoescape=True)))
|
||||||
return str(value)
|
return str(value)
|
||||||
|
|
||||||
def get_rst_view_value(self, value, indent=''):
|
def get_rst_view_value(self, value, indent=''):
|
||||||
|
@ -769,7 +767,7 @@ class StringField(WidgetField):
|
||||||
|
|
||||||
def migrate(self):
|
def migrate(self):
|
||||||
changed = super(StringField, self).migrate()
|
changed = super(StringField, self).migrate()
|
||||||
if isinstance(self.validation, basestring):
|
if isinstance(self.validation, six.string_types):
|
||||||
self.validation = {'type': 'regex', 'value': self.validation}
|
self.validation = {'type': 'regex', 'value': self.validation}
|
||||||
changed = True
|
changed = True
|
||||||
return changed
|
return changed
|
||||||
|
@ -962,7 +960,7 @@ class FileField(WidgetField):
|
||||||
and cur_dt != document_types[document_type_id]:
|
and cur_dt != document_types[document_type_id]:
|
||||||
cur_dt = document_types[document_type_id]
|
cur_dt = document_types[document_type_id]
|
||||||
options = [(None, '---', {})]
|
options = [(None, '---', {})]
|
||||||
options += [(doc_type, doc_type['label'], key) for key, doc_type in document_types.iteritems()]
|
options += [(doc_type, doc_type['label'], key) for key, doc_type in document_types.items()]
|
||||||
form.add(SingleSelectWidget, 'document_type', title=_('File type suggestion'),
|
form.add(SingleSelectWidget, 'document_type', title=_('File type suggestion'),
|
||||||
value=cur_dt, options=options,
|
value=cur_dt, options=options,
|
||||||
advanced=not(cur_dt))
|
advanced=not(cur_dt))
|
||||||
|
@ -1003,7 +1001,7 @@ class FileField(WidgetField):
|
||||||
# b64_content key and a filename keys and an optional
|
# b64_content key and a filename keys and an optional
|
||||||
# content_type key.
|
# content_type key.
|
||||||
if 'b64_content' in value:
|
if 'b64_content' in value:
|
||||||
value['content'] = base64.decodestring(value['b64_content'])
|
value['content'] = base64.decodestring(force_bytes(value['b64_content']))
|
||||||
if 'filename' in value and 'content' in value:
|
if 'filename' in value and 'content' in value:
|
||||||
content_type = value.get('content_type') or 'application/octet-stream'
|
content_type = value.get('content_type') or 'application/octet-stream'
|
||||||
if content_type.startswith('text/'):
|
if content_type.startswith('text/'):
|
||||||
|
@ -1011,7 +1009,7 @@ class FileField(WidgetField):
|
||||||
else:
|
else:
|
||||||
charset = None
|
charset = None
|
||||||
upload = PicklableUpload(value['filename'], content_type, charset)
|
upload = PicklableUpload(value['filename'], content_type, charset)
|
||||||
upload.receive([value['content']])
|
upload.receive([force_bytes(value['content'])])
|
||||||
return upload
|
return upload
|
||||||
raise ValueError('invalid data for file type (%r)' % value)
|
raise ValueError('invalid data for file type (%r)' % value)
|
||||||
|
|
||||||
|
@ -1036,7 +1034,7 @@ class FileField(WidgetField):
|
||||||
'field_id': self.id,
|
'field_id': self.id,
|
||||||
'filename': value.base_filename,
|
'filename': value.base_filename,
|
||||||
'content_type': value.content_type or 'application/octet-stream',
|
'content_type': value.content_type or 'application/octet-stream',
|
||||||
'content': base64.b64encode(value.get_content())
|
'content': force_text(base64.b64encode(value.get_content())),
|
||||||
}
|
}
|
||||||
|
|
||||||
def from_json_value(self, value):
|
def from_json_value(self, value):
|
||||||
|
@ -1075,7 +1073,7 @@ class FileField(WidgetField):
|
||||||
}
|
}
|
||||||
# Local document types
|
# Local document types
|
||||||
document_types.update(get_cfg('filetypes', {}))
|
document_types.update(get_cfg('filetypes', {}))
|
||||||
for key, document_type in document_types.iteritems():
|
for key, document_type in document_types.items():
|
||||||
document_type['id'] = key
|
document_type['id'] = key
|
||||||
# add current file type if it does not exist anymore in the settings
|
# add current file type if it does not exist anymore in the settings
|
||||||
cur_dt = self.document_type or {}
|
cur_dt = self.document_type or {}
|
||||||
|
@ -1091,7 +1089,7 @@ class FileField(WidgetField):
|
||||||
file_type = self.__dict__['file_type']
|
file_type = self.__dict__['file_type']
|
||||||
document_types = self.get_document_types()
|
document_types = self.get_document_types()
|
||||||
parts = []
|
parts = []
|
||||||
for key, value in document_types.iteritems():
|
for key, value in document_types.items():
|
||||||
if file_type == value.get('mimetypes'):
|
if file_type == value.get('mimetypes'):
|
||||||
self.document_type = value.copy()
|
self.document_type = value.copy()
|
||||||
self.document_type['id'] = key
|
self.document_type['id'] = key
|
||||||
|
@ -1763,19 +1761,19 @@ class PageField(Field):
|
||||||
for post_condition_node in node.findall('post_condition'):
|
for post_condition_node in node.findall('post_condition'):
|
||||||
if post_condition_node.findall('condition/type'):
|
if post_condition_node.findall('condition/type'):
|
||||||
condition = {
|
condition = {
|
||||||
'type': post_condition_node.find('condition/type').text.encode(charset),
|
'type': force_str(post_condition_node.find('condition/type').text),
|
||||||
'value': post_condition_node.find('condition/value').text.encode(charset),
|
'value': force_str(post_condition_node.find('condition/value').text),
|
||||||
}
|
}
|
||||||
elif post_condition_node.find('condition').text:
|
elif post_condition_node.find('condition').text:
|
||||||
condition = {
|
condition = {
|
||||||
'type': 'python',
|
'type': 'python',
|
||||||
'value': post_condition_node.find('condition').text.encode(charset),
|
'value': force_str(post_condition_node.find('condition').text),
|
||||||
}
|
}
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
self.post_conditions.append({
|
self.post_conditions.append({
|
||||||
'condition': condition,
|
'condition': condition,
|
||||||
'error_message': post_condition_node.find('error_message').text.encode(charset),
|
'error_message': force_str(post_condition_node.find('error_message').text),
|
||||||
})
|
})
|
||||||
|
|
||||||
def post_conditions_export_to_xml(self, node, charset, include_id=False):
|
def post_conditions_export_to_xml(self, node, charset, include_id=False):
|
||||||
|
@ -1786,12 +1784,12 @@ class PageField(Field):
|
||||||
for post_condition in self.post_conditions:
|
for post_condition in self.post_conditions:
|
||||||
post_condition_node = ET.SubElement(conditions_node, 'post_condition')
|
post_condition_node = ET.SubElement(conditions_node, 'post_condition')
|
||||||
condition_node = ET.SubElement(post_condition_node, 'condition')
|
condition_node = ET.SubElement(post_condition_node, 'condition')
|
||||||
ET.SubElement(condition_node, 'type').text = unicode(
|
ET.SubElement(condition_node, 'type').text = force_text(
|
||||||
post_condition['condition'].get('type') or '', charset, 'replace')
|
post_condition['condition'].get('type') or '', charset, errors='replace')
|
||||||
ET.SubElement(condition_node, 'value').text = unicode(
|
ET.SubElement(condition_node, 'value').text = force_text(
|
||||||
post_condition['condition'].get('value') or '', charset, 'replace')
|
post_condition['condition'].get('value') or '', charset, errors='replace')
|
||||||
ET.SubElement(post_condition_node, 'error_message').text = unicode(
|
ET.SubElement(post_condition_node, 'error_message').text = force_text(
|
||||||
post_condition['error_message'] or '', charset, 'replace')
|
post_condition['error_message'] or '', charset, errors='replace')
|
||||||
|
|
||||||
def fill_admin_form(self, form):
|
def fill_admin_form(self, form):
|
||||||
form.add(StringWidget, 'label', title = _('Label'), value = self.label,
|
form.add(StringWidget, 'label', title = _('Label'), value = self.label,
|
||||||
|
@ -1808,7 +1806,7 @@ class PageField(Field):
|
||||||
|
|
||||||
def migrate(self):
|
def migrate(self):
|
||||||
changed = super(PageField, self).migrate()
|
changed = super(PageField, self).migrate()
|
||||||
if isinstance(self.condition, basestring):
|
if isinstance(self.condition, six.string_types):
|
||||||
if self.condition:
|
if self.condition:
|
||||||
self.condition = {'type': 'python', 'value': self.condition}
|
self.condition = {'type': 'python', 'value': self.condition}
|
||||||
else:
|
else:
|
||||||
|
@ -1816,7 +1814,7 @@ class PageField(Field):
|
||||||
changed = True
|
changed = True
|
||||||
for post_condition in self.post_conditions or []:
|
for post_condition in self.post_conditions or []:
|
||||||
condition = post_condition.get('condition')
|
condition = post_condition.get('condition')
|
||||||
if isinstance(condition, basestring):
|
if isinstance(condition, six.string_types):
|
||||||
if condition:
|
if condition:
|
||||||
post_condition['condition'] = {'type': 'python', 'value': condition}
|
post_condition['condition'] = {'type': 'python', 'value': condition}
|
||||||
else:
|
else:
|
||||||
|
@ -2265,8 +2263,8 @@ class RankedItemsField(WidgetField):
|
||||||
def get_view_value(self, value):
|
def get_view_value(self, value):
|
||||||
r = TemplateIO(html=True)
|
r = TemplateIO(html=True)
|
||||||
r += htmltext('<ul>')
|
r += htmltext('<ul>')
|
||||||
items = value.items()
|
items = list(value.items())
|
||||||
items.sort(lambda x,y: cmp(x[1], y[1]))
|
items.sort(key=lambda x: x[1] or sys.maxsize)
|
||||||
counter = 0
|
counter = 0
|
||||||
last_it = None
|
last_it = None
|
||||||
for it in items:
|
for it in items:
|
||||||
|
@ -2279,8 +2277,8 @@ class RankedItemsField(WidgetField):
|
||||||
return r.getvalue()
|
return r.getvalue()
|
||||||
|
|
||||||
def get_rst_view_value(self, value, indent=''):
|
def get_rst_view_value(self, value, indent=''):
|
||||||
items = value.items()
|
items = list(value.items())
|
||||||
items.sort(lambda x,y: cmp(x[1], y[1]))
|
items.sort(key=lambda x: x[1] or sys.maxsize)
|
||||||
counter = 0
|
counter = 0
|
||||||
last_it = None
|
last_it = None
|
||||||
values = []
|
values = []
|
||||||
|
@ -2302,9 +2300,9 @@ class RankedItemsField(WidgetField):
|
||||||
return ['']
|
return ['']
|
||||||
if type(value) is not dict:
|
if type(value) is not dict:
|
||||||
value = {}
|
value = {}
|
||||||
items = value.items()
|
items = [x for x in value.items() if x[1] is not None]
|
||||||
items.sort(lambda x,y: cmp(x[1], y[1]))
|
items.sort(key=lambda x: x[1])
|
||||||
ranked = [x[0] for x in items if x[1] is not None]
|
ranked = [x[0] for x in items]
|
||||||
return ranked + ['' for x in range(len(self.items)-len(ranked))]
|
return ranked + ['' for x in range(len(self.items)-len(ranked))]
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -22,6 +22,8 @@ import re
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
|
||||||
from quixote import get_request, get_publisher, get_session
|
from quixote import get_request, get_publisher, get_session
|
||||||
from quixote.http_request import Upload
|
from quixote.http_request import Upload
|
||||||
|
|
||||||
|
@ -94,7 +96,7 @@ def get_dict_with_varnames(fields, data, formdata=None, varnames_only=False):
|
||||||
return new_data
|
return new_data
|
||||||
|
|
||||||
def flatten_dict(d):
|
def flatten_dict(d):
|
||||||
for k, v in d.items():
|
for k, v in list(d.items()):
|
||||||
if type(v) is dict:
|
if type(v) is dict:
|
||||||
flatten_dict(v)
|
flatten_dict(v)
|
||||||
for k2, v2 in v.items():
|
for k2, v2 in v.items():
|
||||||
|
@ -207,9 +209,9 @@ class Evolution(object):
|
||||||
# don't pickle _formata cache
|
# don't pickle _formata cache
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
odict = self.__dict__.copy()
|
odict = self.__dict__.copy()
|
||||||
if odict.has_key('_formdata'):
|
if '_formdata' in odict:
|
||||||
del odict['_formdata']
|
del odict['_formdata']
|
||||||
if odict.has_key('_display_parts'):
|
if '_display_parts' in odict:
|
||||||
del odict['_display_parts']
|
del odict['_display_parts']
|
||||||
return odict
|
return odict
|
||||||
|
|
||||||
|
@ -407,7 +409,7 @@ class FormData(StorableObject):
|
||||||
if field.prefill and field.prefill.get('type') == 'user':
|
if field.prefill and field.prefill.get('type') == 'user':
|
||||||
form_user_data[field.prefill['value']] = self.data.get(field.id)
|
form_user_data[field.prefill['value']] = self.data.get(field.id)
|
||||||
user_label = ' '.join([form_user_data.get(x) for x in field_name_values
|
user_label = ' '.join([form_user_data.get(x) for x in field_name_values
|
||||||
if isinstance(form_user_data.get(x), basestring)])
|
if isinstance(form_user_data.get(x), six.string_types)])
|
||||||
if user_label != self.user_label:
|
if user_label != self.user_label:
|
||||||
self.user_label = user_label
|
self.user_label = user_label
|
||||||
changed = True
|
changed = True
|
||||||
|
@ -1073,16 +1075,14 @@ class FormData(StorableObject):
|
||||||
|
|
||||||
if self.geolocations:
|
if self.geolocations:
|
||||||
data['geolocations'] = {}
|
data['geolocations'] = {}
|
||||||
for k, v in self.geolocations.iteritems():
|
for k, v in self.geolocations.items():
|
||||||
data['geolocations'][k] = v.copy()
|
data['geolocations'][k] = v.copy()
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def export_to_json(self, include_files=True, anonymise=False):
|
def export_to_json(self, include_files=True, anonymise=False):
|
||||||
data = self.get_json_export_dict(include_files=include_files, anonymise=anonymise)
|
data = self.get_json_export_dict(include_files=include_files, anonymise=anonymise)
|
||||||
return json.dumps(data,
|
return json.dumps(data, cls=misc.JSONEncoder)
|
||||||
cls=misc.JSONEncoder,
|
|
||||||
encoding=get_publisher().site_charset)
|
|
||||||
|
|
||||||
def mark_as_being_visited(self):
|
def mark_as_being_visited(self):
|
||||||
object_key = 'formdata-%s-%s' % (self.formdef.url_name, self.id)
|
object_key = 'formdata-%s-%s' % (self.formdef.url_name, self.id)
|
||||||
|
@ -1107,7 +1107,7 @@ class FormData(StorableObject):
|
||||||
# don't pickle _formdef cache
|
# don't pickle _formdef cache
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
odict = self.__dict__
|
odict = self.__dict__
|
||||||
if odict.has_key('_formdef'):
|
if '_formdef' in odict:
|
||||||
del odict['_formdef']
|
del odict['_formdef']
|
||||||
return odict
|
return odict
|
||||||
|
|
||||||
|
|
105
wcs/formdef.py
105
wcs/formdef.py
|
@ -17,17 +17,21 @@
|
||||||
import base64
|
import base64
|
||||||
import copy
|
import copy
|
||||||
import glob
|
import glob
|
||||||
import new
|
import itertools
|
||||||
import pickle
|
import pickle
|
||||||
import sys
|
import sys
|
||||||
|
import types
|
||||||
import json
|
import json
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
|
from django.utils.encoding import force_bytes, force_text
|
||||||
|
|
||||||
from quixote import get_request, get_publisher
|
from quixote import get_request, get_publisher
|
||||||
from quixote.http_request import Upload
|
from quixote.http_request import Upload
|
||||||
|
|
||||||
from .qommon import _
|
from .qommon import _, force_str, PICKLE_KWARGS
|
||||||
from .qommon.storage import StorableObject, fix_key
|
from .qommon.storage import StorableObject, fix_key
|
||||||
from .qommon.cron import CronJob
|
from .qommon.cron import CronJob
|
||||||
from .qommon.form import *
|
from .qommon.form import *
|
||||||
|
@ -42,6 +46,9 @@ from .categories import Category
|
||||||
from . import fields
|
from . import fields
|
||||||
from . import data_sources
|
from . import data_sources
|
||||||
|
|
||||||
|
if not hasattr(types, 'ClassType'):
|
||||||
|
types.ClassType = type
|
||||||
|
|
||||||
|
|
||||||
class FormdefImportError(Exception):
|
class FormdefImportError(Exception):
|
||||||
def __init__(self, msg, details=None):
|
def __init__(self, msg, details=None):
|
||||||
|
@ -138,12 +145,12 @@ class FormDef(StorableObject):
|
||||||
# don't run migration on lightweight objects
|
# don't run migration on lightweight objects
|
||||||
return
|
return
|
||||||
|
|
||||||
if self.__dict__.has_key('receiver'):
|
if 'receiver' in self.__dict__:
|
||||||
self.receiver_id = self.__dict__['receiver']
|
self.receiver_id = self.__dict__['receiver']
|
||||||
del self.__dict__['receiver']
|
del self.__dict__['receiver']
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if self.__dict__.has_key('category'):
|
if 'category' in self.__dict__:
|
||||||
self.category_id = self.__dict__['category']
|
self.category_id = self.__dict__['category']
|
||||||
del self.__dict__['category']
|
del self.__dict__['category']
|
||||||
changed = True
|
changed = True
|
||||||
|
@ -159,7 +166,7 @@ class FormDef(StorableObject):
|
||||||
|
|
||||||
if self.fields and type(self.fields[0]) is dict:
|
if self.fields and type(self.fields[0]) is dict:
|
||||||
for f in self.fields:
|
for f in self.fields:
|
||||||
if f.has_key('name'):
|
if 'name' in f:
|
||||||
f['label'] = f['name']
|
f['label'] = f['name']
|
||||||
del f['name']
|
del f['name']
|
||||||
self.fields = [FormField(**x) for x in self.fields]
|
self.fields = [FormField(**x) for x in self.fields]
|
||||||
|
@ -167,7 +174,7 @@ class FormDef(StorableObject):
|
||||||
f.id = str(i)
|
f.id = str(i)
|
||||||
for formdata in self.data_class().select():
|
for formdata in self.data_class().select():
|
||||||
for f in self.fields:
|
for f in self.fields:
|
||||||
if not formdata.data.has_key(f.label):
|
if not f.label in formdata.data:
|
||||||
continue
|
continue
|
||||||
formdata.data[f.id] = formdata.data[f.label]
|
formdata.data[f.id] = formdata.data[f.label]
|
||||||
del formdata.data[f.label]
|
del formdata.data[f.label]
|
||||||
|
@ -179,11 +186,11 @@ class FormDef(StorableObject):
|
||||||
# (200603)
|
# (200603)
|
||||||
self.fields = [x.real_field for x in self.fields]
|
self.fields = [x.real_field for x in self.fields]
|
||||||
|
|
||||||
if self.__dict__.has_key('public'):
|
if 'public' in self.__dict__:
|
||||||
del self.__dict__['public']
|
del self.__dict__['public']
|
||||||
changed = True
|
changed = True
|
||||||
|
|
||||||
if self.__dict__.has_key('receiver_id'):
|
if 'receiver_id' in self.__dict__:
|
||||||
# migration from a simple receiver role to workflow roles
|
# migration from a simple receiver role to workflow roles
|
||||||
if not self.workflow_roles:
|
if not self.workflow_roles:
|
||||||
self.workflow_roles = {}
|
self.workflow_roles = {}
|
||||||
|
@ -261,12 +268,12 @@ class FormDef(StorableObject):
|
||||||
if (get_publisher().is_using_postgresql() and not mode == 'files') or mode == 'sql':
|
if (get_publisher().is_using_postgresql() and not mode == 'files') or mode == 'sql':
|
||||||
from . import sql
|
from . import sql
|
||||||
table_name = sql.get_formdef_table_name(self)
|
table_name = sql.get_formdef_table_name(self)
|
||||||
cls = new.classobj(self.url_name.title(), (sql.SqlFormData,),
|
cls = types.ClassType(self.url_name.title(), (sql.SqlFormData,),
|
||||||
{'_formdef': self,
|
{'_formdef': self,
|
||||||
'_table_name': table_name})
|
'_table_name': table_name})
|
||||||
actions = sql.do_formdef_tables(self)
|
actions = sql.do_formdef_tables(self)
|
||||||
else:
|
else:
|
||||||
cls = new.classobj(self.url_name.title(), (FormData,),
|
cls = types.ClassType(self.url_name.title(), (FormData,),
|
||||||
{'_names': 'form-%s' % self.internal_identifier,
|
{'_names': 'form-%s' % self.internal_identifier,
|
||||||
'_formdef': self})
|
'_formdef': self})
|
||||||
actions = []
|
actions = []
|
||||||
|
@ -698,7 +705,7 @@ class FormDef(StorableObject):
|
||||||
display_value = field.store_display_value(d, field.id)
|
display_value = field.store_display_value(d, field.id)
|
||||||
if display_value is not None:
|
if display_value is not None:
|
||||||
d['%s_display' % field.id] = display_value
|
d['%s_display' % field.id] = display_value
|
||||||
elif d.has_key('%s_display' % field.id):
|
elif '%s_display' % field.id in d:
|
||||||
del d['%s_display' % field.id]
|
del d['%s_display' % field.id]
|
||||||
if d.get(field.id) is not None and field.store_structured_value:
|
if d.get(field.id) is not None and field.store_structured_value:
|
||||||
structured_value = field.store_structured_value(d, field.id)
|
structured_value = field.store_structured_value(d, field.id)
|
||||||
|
@ -721,11 +728,11 @@ class FormDef(StorableObject):
|
||||||
def export_to_json(self, include_id=False, indent=None, anonymise=True):
|
def export_to_json(self, include_id=False, indent=None, anonymise=True):
|
||||||
charset = get_publisher().site_charset
|
charset = get_publisher().site_charset
|
||||||
root = {}
|
root = {}
|
||||||
root['name'] = unicode(self.name, charset)
|
root['name'] = force_text(self.name, charset)
|
||||||
if include_id and self.id:
|
if include_id and self.id:
|
||||||
root['id'] = str(self.id)
|
root['id'] = str(self.id)
|
||||||
if self.category:
|
if self.category:
|
||||||
root['category'] = unicode(self.category.name, charset)
|
root['category'] = force_text(self.category.name, charset)
|
||||||
root['category_id'] = str(self.category.id)
|
root['category_id'] = str(self.category.id)
|
||||||
if self.workflow:
|
if self.workflow:
|
||||||
root['workflow'] = self.workflow.get_json_export_dict(include_id=include_id)
|
root['workflow'] = self.workflow.get_json_export_dict(include_id=include_id)
|
||||||
|
@ -758,7 +765,12 @@ class FormDef(StorableObject):
|
||||||
root['geolocations'] = self.geolocations.copy()
|
root['geolocations'] = self.geolocations.copy()
|
||||||
|
|
||||||
if self.workflow_options:
|
if self.workflow_options:
|
||||||
root['options'] = self.workflow_options
|
root['options'] = self.workflow_options.copy()
|
||||||
|
for k, v in list(root['options'].items()):
|
||||||
|
# convert time.struct_time to strings as python3 would
|
||||||
|
# serialize it as tuple.
|
||||||
|
if isinstance(v, time.struct_time):
|
||||||
|
root['options'][k] = time.strftime('%Y-%m-%dT%H:%M:%S', v)
|
||||||
|
|
||||||
if self.required_authentication_contexts:
|
if self.required_authentication_contexts:
|
||||||
root['required_authentication_contexts'] = self.required_authentication_contexts[:]
|
root['required_authentication_contexts'] = self.required_authentication_contexts[:]
|
||||||
|
@ -776,8 +788,8 @@ class FormDef(StorableObject):
|
||||||
return dict([(unicode2str(k), unicode2str(v)) for k, v in v.items()])
|
return dict([(unicode2str(k), unicode2str(v)) for k, v in v.items()])
|
||||||
elif isinstance(v, list):
|
elif isinstance(v, list):
|
||||||
return [unicode2str(x) for x in v]
|
return [unicode2str(x) for x in v]
|
||||||
elif isinstance(v, unicode):
|
elif isinstance(v, six.string_types):
|
||||||
return v.encode(charset)
|
return force_str(v)
|
||||||
else:
|
else:
|
||||||
return v
|
return v
|
||||||
|
|
||||||
|
@ -804,7 +816,7 @@ class FormDef(StorableObject):
|
||||||
and 'id' in value['workflow']):
|
and 'id' in value['workflow']):
|
||||||
formdef.workflow_id = value['workflow'].get('id')
|
formdef.workflow_id = value['workflow'].get('id')
|
||||||
elif 'workflow' in value:
|
elif 'workflow' in value:
|
||||||
if isinstance(value['workflow'], basestring):
|
if isinstance(value['workflow'], six.string_types):
|
||||||
workflow = value.get('workflow')
|
workflow = value.get('workflow')
|
||||||
else:
|
else:
|
||||||
workflow = value['workflow'].get('name')
|
workflow = value['workflow'].get('name')
|
||||||
|
@ -848,7 +860,7 @@ class FormDef(StorableObject):
|
||||||
filename = option_value['filename']
|
filename = option_value['filename']
|
||||||
upload = Upload(filename, content_type=option_value['content_type'])
|
upload = Upload(filename, content_type=option_value['content_type'])
|
||||||
new_value = UploadedFile(get_publisher().app_dir, filename, upload)
|
new_value = UploadedFile(get_publisher().app_dir, filename, upload)
|
||||||
new_value.set_content(base64.decodestring(option_value['content']))
|
new_value.set_content(base64.decodestring(force_bytes(option_value['content'])))
|
||||||
formdef.workflow_options[option_key] = new_value
|
formdef.workflow_options[option_key] = new_value
|
||||||
|
|
||||||
if value.get('geolocations'):
|
if value.get('geolocations'):
|
||||||
|
@ -868,7 +880,7 @@ class FormDef(StorableObject):
|
||||||
for text_attribute in list(self.TEXT_ATTRIBUTES):
|
for text_attribute in list(self.TEXT_ATTRIBUTES):
|
||||||
if not hasattr(self, text_attribute) or not getattr(self, text_attribute):
|
if not hasattr(self, text_attribute) or not getattr(self, text_attribute):
|
||||||
continue
|
continue
|
||||||
ET.SubElement(root, text_attribute).text = unicode(
|
ET.SubElement(root, text_attribute).text = force_text(
|
||||||
getattr(self, text_attribute), charset)
|
getattr(self, text_attribute), charset)
|
||||||
for boolean_attribute in self.BOOLEAN_ATTRIBUTES:
|
for boolean_attribute in self.BOOLEAN_ATTRIBUTES:
|
||||||
if not hasattr(self, boolean_attribute):
|
if not hasattr(self, boolean_attribute):
|
||||||
|
@ -882,13 +894,13 @@ class FormDef(StorableObject):
|
||||||
|
|
||||||
if self.category:
|
if self.category:
|
||||||
elem = ET.SubElement(root, 'category')
|
elem = ET.SubElement(root, 'category')
|
||||||
elem.text = unicode(self.category.name, charset)
|
elem.text = force_text(self.category.name, charset)
|
||||||
if include_id:
|
if include_id:
|
||||||
elem.attrib['category_id'] = str(self.category.id)
|
elem.attrib['category_id'] = str(self.category.id)
|
||||||
|
|
||||||
if self.workflow:
|
if self.workflow:
|
||||||
elem = ET.SubElement(root, 'workflow')
|
elem = ET.SubElement(root, 'workflow')
|
||||||
elem.text = unicode(self.workflow.name, charset)
|
elem.text = force_text(self.workflow.name, charset)
|
||||||
if include_id:
|
if include_id:
|
||||||
elem.attrib['workflow_id'] = str(self.workflow.id)
|
elem.attrib['workflow_id'] = str(self.workflow.id)
|
||||||
|
|
||||||
|
@ -921,12 +933,12 @@ class FormDef(StorableObject):
|
||||||
continue
|
continue
|
||||||
role_id = str(role_id)
|
role_id = str(role_id)
|
||||||
if role_id.startswith('_') or role_id == 'logged-users':
|
if role_id.startswith('_') or role_id == 'logged-users':
|
||||||
role = unicode(role_id, charset)
|
role = force_text(role_id, charset)
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
role = unicode(Role.get(role_id).name, charset)
|
role = force_text(Role.get(role_id).name, charset)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
role = unicode(role_id, charset)
|
role = force_text(role_id, charset)
|
||||||
sub = ET.SubElement(roles, 'role')
|
sub = ET.SubElement(roles, 'role')
|
||||||
if include_id:
|
if include_id:
|
||||||
sub.attrib['role_id'] = role_id
|
sub.attrib['role_id'] = role_id
|
||||||
|
@ -939,12 +951,12 @@ class FormDef(StorableObject):
|
||||||
continue
|
continue
|
||||||
role_id = str(role_id)
|
role_id = str(role_id)
|
||||||
if role_id.startswith('_') or role_id == 'logged-users':
|
if role_id.startswith('_') or role_id == 'logged-users':
|
||||||
role = unicode(role_id, charset)
|
role = force_text(role_id, charset)
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
role = unicode(Role.get(role_id).name, charset)
|
role = force_text(Role.get(role_id).name, charset)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
role = unicode(role_id, charset)
|
role = force_text(role_id, charset)
|
||||||
sub = ET.SubElement(roles, 'role')
|
sub = ET.SubElement(roles, 'role')
|
||||||
sub.attrib['role_key'] = role_key
|
sub.attrib['role_key'] = role_key
|
||||||
if include_id:
|
if include_id:
|
||||||
|
@ -952,17 +964,17 @@ class FormDef(StorableObject):
|
||||||
sub.text = role
|
sub.text = role
|
||||||
|
|
||||||
options = ET.SubElement(root, 'options')
|
options = ET.SubElement(root, 'options')
|
||||||
for option in self.workflow_options or []:
|
for option in sorted(self.workflow_options or []):
|
||||||
element = ET.SubElement(options, 'option')
|
element = ET.SubElement(options, 'option')
|
||||||
element.attrib['varname'] = option
|
element.attrib['varname'] = option
|
||||||
option_value = self.workflow_options.get(option)
|
option_value = self.workflow_options.get(option)
|
||||||
if isinstance(option_value, basestring):
|
if isinstance(option_value, six.string_types):
|
||||||
element.text = unicode(self.workflow_options.get(option, ''), charset)
|
element.text = force_text(self.workflow_options.get(option, ''), charset)
|
||||||
elif hasattr(option_value, 'base_filename'):
|
elif hasattr(option_value, 'base_filename'):
|
||||||
ET.SubElement(element, 'filename').text = option_value.base_filename
|
ET.SubElement(element, 'filename').text = option_value.base_filename
|
||||||
ET.SubElement(element, 'content_type').text = (
|
ET.SubElement(element, 'content_type').text = (
|
||||||
option_value.content_type or 'application/octet-stream')
|
option_value.content_type or 'application/octet-stream')
|
||||||
ET.SubElement(element, 'content').text = base64.b64encode(option_value.get_content())
|
ET.SubElement(element, 'content').text = force_text(base64.b64encode(option_value.get_content()))
|
||||||
elif isinstance(option_value, time.struct_time):
|
elif isinstance(option_value, time.struct_time):
|
||||||
element.text = time.strftime('%Y-%m-%d', option_value)
|
element.text = time.strftime('%Y-%m-%d', option_value)
|
||||||
element.attrib['type'] = 'date'
|
element.attrib['type'] = 'date'
|
||||||
|
@ -973,12 +985,12 @@ class FormDef(StorableObject):
|
||||||
for geoloc_key, geoloc_label in (self.geolocations or {}).items():
|
for geoloc_key, geoloc_label in (self.geolocations or {}).items():
|
||||||
element = ET.SubElement(geolocations, 'geolocation')
|
element = ET.SubElement(geolocations, 'geolocation')
|
||||||
element.attrib['key'] = geoloc_key
|
element.attrib['key'] = geoloc_key
|
||||||
element.text = unicode(geoloc_label, charset)
|
element.text = force_text(geoloc_label, charset)
|
||||||
|
|
||||||
if self.required_authentication_contexts:
|
if self.required_authentication_contexts:
|
||||||
element = ET.SubElement(root, 'required_authentication_contexts')
|
element = ET.SubElement(root, 'required_authentication_contexts')
|
||||||
for auth_context in self.required_authentication_contexts:
|
for auth_context in self.required_authentication_contexts:
|
||||||
ET.SubElement(element, 'method').text = unicode(auth_context)
|
ET.SubElement(element, 'method').text = force_text(auth_context)
|
||||||
|
|
||||||
return root
|
return root
|
||||||
|
|
||||||
|
@ -1030,6 +1042,7 @@ class FormDef(StorableObject):
|
||||||
def import_from_xml_tree(cls, tree, include_id=False, charset=None, fix_on_error=False):
|
def import_from_xml_tree(cls, tree, include_id=False, charset=None, fix_on_error=False):
|
||||||
if charset is None:
|
if charset is None:
|
||||||
charset = get_publisher().site_charset
|
charset = get_publisher().site_charset
|
||||||
|
assert charset == 'utf-8'
|
||||||
formdef = cls()
|
formdef = cls()
|
||||||
if tree.find('name') is None or not tree.find('name').text:
|
if tree.find('name') is None or not tree.find('name').text:
|
||||||
raise FormdefImportError(N_('Missing name'))
|
raise FormdefImportError(N_('Missing name'))
|
||||||
|
@ -1048,7 +1061,7 @@ class FormDef(StorableObject):
|
||||||
value = tree.find(text_attribute)
|
value = tree.find(text_attribute)
|
||||||
if value is None:
|
if value is None:
|
||||||
continue
|
continue
|
||||||
setattr(formdef, text_attribute, value.text.encode(charset))
|
setattr(formdef, text_attribute, force_str(value.text))
|
||||||
|
|
||||||
for boolean_attribute in cls.BOOLEAN_ATTRIBUTES:
|
for boolean_attribute in cls.BOOLEAN_ATTRIBUTES:
|
||||||
value = tree.find(boolean_attribute)
|
value = tree.find(boolean_attribute)
|
||||||
|
@ -1082,12 +1095,12 @@ class FormDef(StorableObject):
|
||||||
if option.attrib.get('type') == 'date':
|
if option.attrib.get('type') == 'date':
|
||||||
option_value = time.strptime(option.text, '%Y-%m-%d')
|
option_value = time.strptime(option.text, '%Y-%m-%d')
|
||||||
elif option.text:
|
elif option.text:
|
||||||
option_value = option.text.encode(charset)
|
option_value = force_str(option.text)
|
||||||
elif option.findall('filename'):
|
elif option.findall('filename'):
|
||||||
filename = option.find('filename').text
|
filename = option.find('filename').text
|
||||||
upload = Upload(filename, content_type=option.find('content_type').text)
|
upload = Upload(filename, content_type=option.find('content_type').text)
|
||||||
option_value = UploadedFile(get_publisher().app_dir, filename, upload)
|
option_value = UploadedFile(get_publisher().app_dir, filename, upload)
|
||||||
option_value.set_content(base64.decodestring(option.find('content').text))
|
option_value.set_content(base64.decodestring(force_bytes(option.find('content').text)))
|
||||||
formdef.workflow_options[option.attrib.get('varname')] = option_value
|
formdef.workflow_options[option.attrib.get('varname')] = option_value
|
||||||
|
|
||||||
if tree.find('last_modification') is not None:
|
if tree.find('last_modification') is not None:
|
||||||
|
@ -1103,7 +1116,7 @@ class FormDef(StorableObject):
|
||||||
if Category.has_key(category_id):
|
if Category.has_key(category_id):
|
||||||
formdef.category_id = category_id
|
formdef.category_id = category_id
|
||||||
else:
|
else:
|
||||||
category = category_node.text.encode(charset)
|
category = force_str(category_node.text)
|
||||||
for c in Category.select():
|
for c in Category.select():
|
||||||
if c.name == category:
|
if c.name == category:
|
||||||
formdef.category_id = c.id
|
formdef.category_id = c.id
|
||||||
|
@ -1117,7 +1130,7 @@ class FormDef(StorableObject):
|
||||||
if Workflow.has_key(workflow_id):
|
if Workflow.has_key(workflow_id):
|
||||||
formdef.workflow_id = workflow_id
|
formdef.workflow_id = workflow_id
|
||||||
else:
|
else:
|
||||||
workflow = workflow_node.text.encode(charset)
|
workflow = force_str(workflow_node.text)
|
||||||
for w in Workflow.select():
|
for w in Workflow.select():
|
||||||
if w.name == workflow:
|
if w.name == workflow:
|
||||||
formdef.workflow_id = w.id
|
formdef.workflow_id = w.id
|
||||||
|
@ -1125,7 +1138,7 @@ class FormDef(StorableObject):
|
||||||
|
|
||||||
def get_role_by_node(role_node):
|
def get_role_by_node(role_node):
|
||||||
role_id = None
|
role_id = None
|
||||||
value = role_node.text.encode(charset)
|
value = force_str(role_node.text)
|
||||||
if value.startswith('_') or value == 'logged-users':
|
if value.startswith('_') or value == 'logged-users':
|
||||||
role_id = value
|
role_id = value
|
||||||
elif include_id:
|
elif include_id:
|
||||||
|
@ -1170,7 +1183,7 @@ class FormDef(StorableObject):
|
||||||
formdef.geolocations = {}
|
formdef.geolocations = {}
|
||||||
for child in geolocations_node:
|
for child in geolocations_node:
|
||||||
geoloc_key = child.attrib['key']
|
geoloc_key = child.attrib['key']
|
||||||
geoloc_value = child.text.encode(charset)
|
geoloc_value = force_str(child.text)
|
||||||
formdef.geolocations[geoloc_key] = geoloc_value
|
formdef.geolocations[geoloc_key] = geoloc_value
|
||||||
|
|
||||||
if tree.find('required_authentication_contexts') is not None:
|
if tree.find('required_authentication_contexts') is not None:
|
||||||
|
@ -1421,11 +1434,11 @@ class FormDef(StorableObject):
|
||||||
# don't pickle computed attributes
|
# don't pickle computed attributes
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
odict = copy.copy(self.__dict__)
|
odict = copy.copy(self.__dict__)
|
||||||
if odict.has_key('_workflow'):
|
if '_workflow' in odict:
|
||||||
del odict['_workflow']
|
del odict['_workflow']
|
||||||
if odict.has_key('_start_page'):
|
if '_start_page' in odict:
|
||||||
del odict['_start_page']
|
del odict['_start_page']
|
||||||
if self.lightweight and odict.has_key('fields'):
|
if self.lightweight and 'fields' in odict:
|
||||||
# will be stored independently
|
# will be stored independently
|
||||||
del odict['fields']
|
del odict['fields']
|
||||||
return odict
|
return odict
|
||||||
|
@ -1443,7 +1456,7 @@ class FormDef(StorableObject):
|
||||||
return o
|
return o
|
||||||
if cls.lightweight:
|
if cls.lightweight:
|
||||||
try:
|
try:
|
||||||
o.fields = pickle.load(fd)
|
o.fields = pickle.load(fd, **PICKLE_KWARGS)
|
||||||
except EOFError:
|
except EOFError:
|
||||||
pass # old format
|
pass # old format
|
||||||
return o
|
return o
|
||||||
|
@ -1579,7 +1592,9 @@ def clean_unused_files(publisher):
|
||||||
if is_upload(option_data):
|
if is_upload(option_data):
|
||||||
yield option_data.get_filename()
|
yield option_data.get_filename()
|
||||||
for formdata in formdef.data_class().select(ignore_errors=True):
|
for formdata in formdef.data_class().select(ignore_errors=True):
|
||||||
for field_data in (formdata.data or {}).values() + (formdata.workflow_data or {}).values():
|
for field_data in itertools.chain(
|
||||||
|
(formdata.data or {}).values(),
|
||||||
|
(formdata.workflow_data or {}).values()):
|
||||||
if is_upload(field_data):
|
if is_upload(field_data):
|
||||||
yield field_data.get_filename()
|
yield field_data.get_filename()
|
||||||
for evolution in (formdata.evolution or []):
|
for evolution in (formdata.evolution or []):
|
||||||
|
|
|
@ -14,7 +14,7 @@
|
||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
# along with this program; if not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import urllib
|
from django.utils.six.moves.urllib import parse as urllib
|
||||||
|
|
||||||
from quixote import get_request, get_publisher, redirect
|
from quixote import get_request, get_publisher, redirect
|
||||||
from quixote.html import htmltext, TemplateIO
|
from quixote.html import htmltext, TemplateIO
|
||||||
|
@ -189,7 +189,7 @@ class FormDefUI(object):
|
||||||
item_ids_dict = {x: True for x in item_ids}
|
item_ids_dict = {x: True for x in item_ids}
|
||||||
item_ids = [x for x in ordered_ids if x in item_ids_dict]
|
item_ids = [x for x in ordered_ids if x in item_ids_dict]
|
||||||
else:
|
else:
|
||||||
item_ids.sort(lambda x,y: cmp(int(x), int(y)))
|
item_ids.sort(key=lambda x: int(x))
|
||||||
item_ids.reverse()
|
item_ids.reverse()
|
||||||
|
|
||||||
total_count = len(item_ids)
|
total_count = len(item_ids)
|
||||||
|
|
|
@ -395,7 +395,7 @@ class FormStatusPage(Directory, FormTemplateMixin):
|
||||||
current_page_fields = []
|
current_page_fields = []
|
||||||
|
|
||||||
def get_value(f):
|
def get_value(f):
|
||||||
if not self.filled.data.has_key(f.id):
|
if f.id not in self.filled.data:
|
||||||
value = None
|
value = None
|
||||||
else:
|
else:
|
||||||
if f.store_display_value and ('%s_display' % f.id) in self.filled.data:
|
if f.store_display_value and ('%s_display' % f.id) in self.filled.data:
|
||||||
|
@ -441,7 +441,7 @@ class FormStatusPage(Directory, FormTemplateMixin):
|
||||||
# ignore empty pages
|
# ignore empty pages
|
||||||
fields = []
|
fields = []
|
||||||
for page in pages:
|
for page in pages:
|
||||||
if not any([x.has_key('value') for x in page['fields']]):
|
if not any([bool('value' in x) for x in page['fields']]):
|
||||||
continue
|
continue
|
||||||
fields.append(page['page'])
|
fields.append(page['page'])
|
||||||
fields.extend([x['field'] for x in page['fields']])
|
fields.extend([x['field'] for x in page['fields']])
|
||||||
|
|
|
@ -25,6 +25,7 @@ try:
|
||||||
except ImportError:
|
except ImportError:
|
||||||
qrcode = None
|
qrcode = None
|
||||||
|
|
||||||
|
from django.utils import six
|
||||||
from django.utils.http import quote
|
from django.utils.http import quote
|
||||||
from django.utils.six import StringIO
|
from django.utils.six import StringIO
|
||||||
from django.utils.safestring import mark_safe
|
from django.utils.safestring import mark_safe
|
||||||
|
@ -317,14 +318,14 @@ class FormPage(Directory, FormTemplateMixin):
|
||||||
else:
|
else:
|
||||||
form_data = {}
|
form_data = {}
|
||||||
|
|
||||||
if page == self.pages[0] and not get_request().form.has_key('magictoken'):
|
if page == self.pages[0] and 'magictoken' not in get_request().form:
|
||||||
magictoken = randbytes(8)
|
magictoken = randbytes(8)
|
||||||
else:
|
else:
|
||||||
magictoken = get_request().form['magictoken']
|
magictoken = get_request().form['magictoken']
|
||||||
form.add_hidden('magictoken', magictoken)
|
form.add_hidden('magictoken', magictoken)
|
||||||
data = session.get_by_magictoken(magictoken, {})
|
data = session.get_by_magictoken(magictoken, {})
|
||||||
|
|
||||||
if page == self.pages[0] and get_request().form.has_key('cancelurl'):
|
if page == self.pages[0] and 'cancelurl' in get_request().form:
|
||||||
cancelurl = get_request().form['cancelurl']
|
cancelurl = get_request().form['cancelurl']
|
||||||
form_data['__cancelurl'] = cancelurl
|
form_data['__cancelurl'] = cancelurl
|
||||||
session.add_magictoken(magictoken, form_data)
|
session.add_magictoken(magictoken, form_data)
|
||||||
|
@ -363,7 +364,7 @@ class FormPage(Directory, FormTemplateMixin):
|
||||||
# "live prefill", regardless of existing data.
|
# "live prefill", regardless of existing data.
|
||||||
form.get_widget('f%s' % k).prefill_attributes = field.get_prefill_attributes()
|
form.get_widget('f%s' % k).prefill_attributes = field.get_prefill_attributes()
|
||||||
|
|
||||||
if data.has_key(k):
|
if k in data:
|
||||||
v = data[k]
|
v = data[k]
|
||||||
elif field.prefill:
|
elif field.prefill:
|
||||||
if get_request().is_in_backoffice() and (
|
if get_request().is_in_backoffice() and (
|
||||||
|
@ -382,6 +383,12 @@ class FormPage(Directory, FormTemplateMixin):
|
||||||
if not isinstance(v, str) and field.convert_value_to_str:
|
if not isinstance(v, str) and field.convert_value_to_str:
|
||||||
v = field.convert_value_to_str(v)
|
v = field.convert_value_to_str(v)
|
||||||
form.get_widget('f%s' % k).set_value(v)
|
form.get_widget('f%s' % k).set_value(v)
|
||||||
|
if field.type == 'item' and form.get_widget('f%s' % k).value != v:
|
||||||
|
# mark field as invalid if the value was not accepted
|
||||||
|
# (this is required by quixote>=3 as the value would
|
||||||
|
# not be evaluated in the initial GET request of the
|
||||||
|
# page).
|
||||||
|
form.get_widget('f%s' % k).set_error(get_selection_error_text())
|
||||||
if verified:
|
if verified:
|
||||||
form.get_widget('f%s' % k).readonly = 'readonly'
|
form.get_widget('f%s' % k).readonly = 'readonly'
|
||||||
form.get_widget('f%s' % k).attrs['readonly'] = 'readonly'
|
form.get_widget('f%s' % k).attrs['readonly'] = 'readonly'
|
||||||
|
@ -623,12 +630,12 @@ class FormPage(Directory, FormTemplateMixin):
|
||||||
self.feed_current_data(get_request().form.get('magictoken'))
|
self.feed_current_data(get_request().form.get('magictoken'))
|
||||||
else:
|
else:
|
||||||
self.feed_current_data(None)
|
self.feed_current_data(None)
|
||||||
if not self.edit_mode and get_request().get_method() == 'GET' and not get_request().form.has_key('mt'):
|
if not self.edit_mode and get_request().get_method() == 'GET' and 'mt' not in get_request().form:
|
||||||
self.initial_hit = True
|
self.initial_hit = True
|
||||||
# first hit on first page, if tracking code are enabled and we
|
# first hit on first page, if tracking code are enabled and we
|
||||||
# are not editing an existing formdata, generate a new tracking
|
# are not editing an existing formdata, generate a new tracking
|
||||||
# code.
|
# code.
|
||||||
if not self.edit_mode and self.formdef.enable_tracking_codes and not get_request().form.has_key('mt'):
|
if not self.edit_mode and self.formdef.enable_tracking_codes and 'mt' not in get_request().form:
|
||||||
tracking_code = get_publisher().tracking_code_class()
|
tracking_code = get_publisher().tracking_code_class()
|
||||||
tracking_code.store()
|
tracking_code.store()
|
||||||
token = randbytes(8)
|
token = randbytes(8)
|
||||||
|
@ -663,7 +670,7 @@ class FormPage(Directory, FormTemplateMixin):
|
||||||
form.add_submit('savedraft')
|
form.add_submit('savedraft')
|
||||||
|
|
||||||
if not form.is_submitted():
|
if not form.is_submitted():
|
||||||
if get_request().form.has_key('mt'):
|
if 'mt' in get_request().form:
|
||||||
magictoken = get_request().form['mt']
|
magictoken = get_request().form['mt']
|
||||||
data = session.get_by_magictoken(magictoken, {})
|
data = session.get_by_magictoken(magictoken, {})
|
||||||
if not get_request().is_in_backoffice():
|
if not get_request().is_in_backoffice():
|
||||||
|
@ -737,7 +744,7 @@ class FormPage(Directory, FormTemplateMixin):
|
||||||
continue
|
continue
|
||||||
v, verified = field.get_prefill_value(user=prefill_user)
|
v, verified = field.get_prefill_value(user=prefill_user)
|
||||||
if verified:
|
if verified:
|
||||||
if not isinstance(v, basestring) and field.convert_value_to_str:
|
if not isinstance(v, six.string_types) and field.convert_value_to_str:
|
||||||
# convert structured data to strings as if they were
|
# convert structured data to strings as if they were
|
||||||
# submitted by the browser.
|
# submitted by the browser.
|
||||||
v = field.convert_value_to_str(v)
|
v = field.convert_value_to_str(v)
|
||||||
|
@ -859,7 +866,7 @@ class FormPage(Directory, FormTemplateMixin):
|
||||||
req = get_request()
|
req = get_request()
|
||||||
for field in self.formdef.fields:
|
for field in self.formdef.fields:
|
||||||
k = field.id
|
k = field.id
|
||||||
if form_data.has_key(k):
|
if k in form_data:
|
||||||
v = form_data[k]
|
v = form_data[k]
|
||||||
if field.convert_value_to_str:
|
if field.convert_value_to_str:
|
||||||
v = field.convert_value_to_str(v)
|
v = field.convert_value_to_str(v)
|
||||||
|
@ -1441,7 +1448,7 @@ class RootDirectory(AccessControlled, Directory):
|
||||||
for formdef in all_formdefs:
|
for formdef in all_formdefs:
|
||||||
user_forms.extend(get_user_forms(formdef))
|
user_forms.extend(get_user_forms(formdef))
|
||||||
user_forms = [x for x in user_forms if x.formdef.is_user_allowed_read(user, x)]
|
user_forms = [x for x in user_forms if x.formdef.is_user_allowed_read(user, x)]
|
||||||
user_forms.sort(lambda x,y: cmp(x.receipt_time, y.receipt_time))
|
user_forms.sort(key=lambda x: (x.receipt_time,))
|
||||||
|
|
||||||
if self.category:
|
if self.category:
|
||||||
r += self.form_list(list_forms, category = self.category,
|
r += self.form_list(list_forms, category = self.category,
|
||||||
|
|
|
@ -57,7 +57,7 @@ class HookDirectory(Directory):
|
||||||
else:
|
else:
|
||||||
raise errors.AccessForbiddenError('insufficient roles')
|
raise errors.AccessForbiddenError('insufficient roles')
|
||||||
|
|
||||||
if hasattr(get_request(), 'json'):
|
if hasattr(get_request(), '_json'):
|
||||||
workflow_data = {self.trigger.identifier: get_request().json}
|
workflow_data = {self.trigger.identifier: get_request().json}
|
||||||
self.formdata.update_workflow_data(workflow_data)
|
self.formdata.update_workflow_data(workflow_data)
|
||||||
self.formdata.store()
|
self.formdata.store()
|
||||||
|
|
|
@ -17,9 +17,9 @@
|
||||||
import json
|
import json
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
import urllib
|
|
||||||
|
|
||||||
from django.http import HttpResponseBadRequest, HttpResponseRedirect
|
from django.http import HttpResponseBadRequest, HttpResponseRedirect
|
||||||
|
from django.utils.six.moves.urllib import parse as urllib
|
||||||
|
|
||||||
from quixote import get_publisher
|
from quixote import get_publisher
|
||||||
from quixote.errors import RequestError
|
from quixote.errors import RequestError
|
||||||
|
@ -63,7 +63,7 @@ class PublisherInitialisationMiddleware(object):
|
||||||
query_string_allowed_vars.split(',')]
|
query_string_allowed_vars.split(',')]
|
||||||
had_session_variables = False
|
had_session_variables = False
|
||||||
session_variables = {}
|
session_variables = {}
|
||||||
for k, v in compat_request.form.items():
|
for k, v in list(compat_request.form.items()):
|
||||||
if k.startswith('session_var_'):
|
if k.startswith('session_var_'):
|
||||||
had_session_variables = True
|
had_session_variables = True
|
||||||
session_variable = str(k[len('session_var_'):])
|
session_variable = str(k[len('session_var_'):])
|
||||||
|
|
|
@ -19,6 +19,7 @@ import hashlib
|
||||||
import urllib
|
import urllib
|
||||||
import base64
|
import base64
|
||||||
|
|
||||||
|
from django.utils.encoding import force_text
|
||||||
from django.utils.six.moves.urllib import parse as urlparse
|
from django.utils.six.moves.urllib import parse as urlparse
|
||||||
|
|
||||||
from .qommon import get_logger
|
from .qommon import get_logger
|
||||||
|
@ -85,13 +86,13 @@ def push_document(user, filename, stream):
|
||||||
charset = get_publisher().site_charset
|
charset = get_publisher().site_charset
|
||||||
payload = {}
|
payload = {}
|
||||||
if user.name_identifiers:
|
if user.name_identifiers:
|
||||||
payload['user_nameid'] = unicode(user.name_identifiers[0], 'ascii')
|
payload['user_nameid'] = force_text(user.name_identifiers[0], 'ascii')
|
||||||
elif user.email:
|
elif user.email:
|
||||||
payload['user_email'] = unicode(user.email, 'ascii')
|
payload['user_email'] = force_text(user.email, 'ascii')
|
||||||
payload['origin'] = urlparse.urlparse(get_publisher().get_frontoffice_url()).netloc
|
payload['origin'] = urlparse.urlparse(get_publisher().get_frontoffice_url()).netloc
|
||||||
payload['file_name'] = unicode(filename, charset)
|
payload['file_name'] = force_text(filename, charset)
|
||||||
stream.seek(0)
|
stream.seek(0)
|
||||||
payload['file_b64_content'] = base64.b64encode(stream.read())
|
payload['file_b64_content'] = force_text(base64.b64encode(stream.read()))
|
||||||
async_post = fargo_post_json_async('/api/documents/push/', payload)
|
async_post = fargo_post_json_async('/api/documents/push/', payload)
|
||||||
|
|
||||||
def afterjob(job):
|
def afterjob(job):
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue