# w.c.s. - web application for online forms
# Copyright (C) 2005-2014 Entr'ouvert
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
2014-07-08 11:14:42 +02:00
|
|
|
import ConfigParser
|
2014-06-17 17:04:40 +02:00
|
|
|
import json
|
|
|
|
import os
|
2014-10-31 14:06:44 +01:00
|
|
|
import subprocess
|
2014-06-17 17:04:40 +02:00
|
|
|
import sys
|
2014-06-23 17:21:02 +02:00
|
|
|
import tempfile
|
2014-06-17 17:04:40 +02:00
|
|
|
import urllib2
|
2015-05-11 17:27:17 +02:00
|
|
|
import urlparse
|
|
|
|
import hashlib
|
2014-06-17 17:04:40 +02:00
|
|
|
|
2015-05-08 14:10:43 +02:00
|
|
|
from qommon.ctl import Command, make_option
|
2014-06-23 17:21:02 +02:00
|
|
|
from qommon.storage import atomic_write
|
2014-06-17 17:04:40 +02:00
|
|
|
|
2015-05-08 15:18:57 +02:00
|
|
|
from wcs.admin.settings import UserFieldsFormDef
|
|
|
|
from wcs.fields import StringField, EmailField
|
|
|
|
|
2014-06-17 17:04:40 +02:00
|
|
|
|
2014-11-14 13:53:52 +01:00
|
|
|
class NoChange(Exception):
    """Raised to signal that the stored configuration already matches the
    incoming environment (same hobo timestamp) and deployment can be skipped."""
|
|
|
|
|
|
|
|
|
2014-06-17 17:04:40 +02:00
|
|
|
class CmdCheckHobos(Command):
    """Deploy or update w.c.s. instances from a hobo environment definition."""

    name = 'hobo_deploy'

    def __init__(self):
        # command-line switches understood by this command
        options = [
            make_option('--ignore-timestamp', action='store_true',
                        dest='ignore_timestamp', default=False),
            make_option('--redeploy', action='store_true', default=False),
        ]
        Command.__init__(self, options)
|
|
|
|
|
2014-06-17 17:04:40 +02:00
|
|
|
def execute(self, base_options, sub_options, args):
|
2015-12-18 23:15:19 +01:00
|
|
|
if sub_options.redeploy:
|
|
|
|
sub_options.ignore_timestamp = True
|
|
|
|
for tenant in os.listdir(WcsPublisher.APP_DIR):
|
|
|
|
if tenant.endswith('.invalid'):
|
|
|
|
continue
|
|
|
|
hobo_json_path = os.path.join(tenant, 'hobo.json')
|
|
|
|
if not os.path.exists(hobo_json_path):
|
|
|
|
continue
|
|
|
|
hobo_json = json.load(open(hobo_json_path))
|
|
|
|
try:
|
|
|
|
me = [service for service in hobo_json['services'] if service['this'] is True][0]
|
|
|
|
except IndexError:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
self.deploy(base_options, sub_options, [me['base_url'], hobo_json_path])
|
|
|
|
else:
|
|
|
|
self.deploy(base_options, sub_options, args)
|
|
|
|
|
|
|
|
def deploy(self, base_options, sub_options, args):
|
2014-06-17 17:04:40 +02:00
|
|
|
import publisher
|
|
|
|
|
2014-10-31 14:06:44 +01:00
|
|
|
self.base_options = base_options
|
2015-03-30 18:07:56 +02:00
|
|
|
if sub_options.extra:
|
|
|
|
if not self.config.has_section('extra'):
|
|
|
|
self.config.add_section('extra')
|
|
|
|
for i, extra in enumerate(sub_options.extra):
|
|
|
|
self.config.set('extra', 'cmd_line_extra_%d' % i, extra)
|
|
|
|
publisher.WcsPublisher.configure(self.config)
|
2014-06-17 17:04:40 +02:00
|
|
|
pub = publisher.WcsPublisher.create_publisher()
|
|
|
|
|
|
|
|
global_app_dir = pub.app_dir
|
2015-02-12 10:38:02 +01:00
|
|
|
base_url = args[0]
|
2014-10-31 12:44:43 +01:00
|
|
|
|
2015-02-12 10:38:02 +01:00
|
|
|
if args[1] == '-':
|
2015-02-12 10:27:16 +01:00
|
|
|
# get environment definition from stdin
|
|
|
|
self.all_services = json.load(sys.stdin)
|
|
|
|
else:
|
2015-02-12 10:38:02 +01:00
|
|
|
self.all_services = json.load(file(args[1]))
|
|
|
|
|
|
|
|
service = [x for x in self.all_services.get('services', []) if \
|
|
|
|
x.get('service-id') == 'wcs' and x.get('base_url') == base_url][0]
|
2015-12-18 23:05:17 +01:00
|
|
|
service['this'] = True
|
2015-06-08 10:05:55 +02:00
|
|
|
if base_url.endswith('/'): # wcs doesn't expect a trailing slash
|
|
|
|
service['base_url'] = base_url[:-1]
|
2015-02-12 10:38:02 +01:00
|
|
|
|
|
|
|
pub.app_dir = os.path.join(global_app_dir,
|
|
|
|
self.get_instance_path(service))
|
|
|
|
if not os.path.exists(pub.app_dir):
|
|
|
|
print 'initializing instance in', pub.app_dir
|
|
|
|
os.mkdir(pub.app_dir)
|
|
|
|
pub.initialize_app_dir()
|
|
|
|
|
|
|
|
skeleton_filepath = os.path.join(global_app_dir, 'skeletons',
|
|
|
|
service.get('template_name'))
|
|
|
|
if os.path.exists(skeleton_filepath):
|
|
|
|
pub.import_zip(file(skeleton_filepath))
|
|
|
|
new_site = True
|
|
|
|
else:
|
|
|
|
print 'updating instance in', pub.app_dir
|
|
|
|
new_site = False
|
|
|
|
|
|
|
|
try:
|
2015-06-08 16:24:25 +02:00
|
|
|
self.configure_site_options(service, pub,
|
|
|
|
ignore_timestamp=sub_options.ignore_timestamp)
|
2015-02-12 10:38:02 +01:00
|
|
|
except NoChange:
|
2015-06-08 16:24:25 +02:00
|
|
|
print ' skipping'
|
|
|
|
return
|
2015-02-12 10:38:02 +01:00
|
|
|
|
2015-03-20 16:52:33 +01:00
|
|
|
pub.set_config()
|
2015-02-12 10:38:02 +01:00
|
|
|
self.update_configuration(service, pub)
|
|
|
|
self.configure_authentication_methods(service, pub)
|
|
|
|
if new_site:
|
|
|
|
self.configure_sql(service, pub)
|
2014-10-31 12:44:43 +01:00
|
|
|
|
2015-05-08 15:18:57 +02:00
|
|
|
self.update_profile(self.all_services.get('profile', {}), pub)
|
2015-12-18 23:05:17 +01:00
|
|
|
# Store hobo.json
|
|
|
|
atomic_write(os.path.join(pub.app_dir, 'hobo.json'), json.dumps(self.all_services))
|
2015-05-08 15:18:57 +02:00
|
|
|
|
2014-10-31 12:44:43 +01:00
|
|
|
def update_configuration(self, service, pub):
|
|
|
|
if not pub.cfg.get('misc'):
|
|
|
|
pub.cfg['misc'] = {}
|
|
|
|
pub.cfg['misc']['sitename'] = service.get('title').encode('utf-8')
|
2014-10-31 13:35:56 +01:00
|
|
|
pub.cfg['misc']['frontoffice-url'] = service.get('base_url').encode('utf-8')
|
2014-10-31 12:44:43 +01:00
|
|
|
pub.write_cfg()
|
|
|
|
|
2015-05-08 15:18:57 +02:00
|
|
|
def update_profile(self, profile, pub):
|
|
|
|
formdef = UserFieldsFormDef(publisher=pub)
|
|
|
|
profile_fields = {}
|
2015-05-19 11:40:12 +02:00
|
|
|
profile_field_ids = ['_' + x['name'] for x in profile.get('fields', [])]
|
2015-05-08 15:18:57 +02:00
|
|
|
for field in formdef.fields:
|
2015-05-19 11:40:12 +02:00
|
|
|
if field.id in profile_field_ids:
|
2015-05-15 11:22:11 +02:00
|
|
|
profile_fields[field.id] = field
|
2015-05-08 15:18:57 +02:00
|
|
|
|
|
|
|
# create or update profile fields
|
|
|
|
for attribute in profile.get('fields', []):
|
2015-05-15 11:22:11 +02:00
|
|
|
field_id = '_' + attribute['name']
|
|
|
|
if not field_id in profile_fields:
|
2015-05-08 15:18:57 +02:00
|
|
|
field_class, field_typename = StringField, 'string'
|
|
|
|
if attribute['kind'] == 'email':
|
|
|
|
field_class, field_typename = EmailField, 'email'
|
|
|
|
new_field = field_class(label=attribute['label'].encode('utf-8'),
|
|
|
|
type=field_typename,
|
|
|
|
varname=attribute['name'])
|
2015-05-15 11:22:11 +02:00
|
|
|
new_field.id = field_id
|
|
|
|
profile_fields[field_id] = new_field
|
2015-05-08 15:18:57 +02:00
|
|
|
else:
|
|
|
|
# remove it for the moment
|
2015-05-15 11:22:11 +02:00
|
|
|
formdef.fields.remove(profile_fields[field_id])
|
2015-05-08 15:18:57 +02:00
|
|
|
|
2015-05-15 11:22:11 +02:00
|
|
|
profile_fields[field_id].label = attribute['label'].encode('utf-8')
|
|
|
|
profile_fields[field_id].hint = attribute['description'].encode('utf-8')
|
|
|
|
profile_fields[field_id].required = attribute['required']
|
2015-05-08 15:18:57 +02:00
|
|
|
|
|
|
|
if attribute['disabled']:
|
2015-05-19 11:40:12 +02:00
|
|
|
profile_field_ids.remove('_' + attribute['name'])
|
2015-05-08 15:18:57 +02:00
|
|
|
|
|
|
|
# insert profile fields at the beginning
|
2015-05-19 11:40:12 +02:00
|
|
|
formdef.fields = [profile_fields[x] for x in profile_field_ids] + formdef.fields
|
2015-05-08 15:18:57 +02:00
|
|
|
formdef.store()
|
|
|
|
|
|
|
|
pub.cfg['users']['field_email'] = '_email'
|
|
|
|
pub.cfg['users']['field_name'] = ['_first_name', '_last_name']
|
|
|
|
pub.write_cfg()
|
|
|
|
|
|
|
|
# add mapping for SAML provisioning
|
|
|
|
for idp in pub.cfg.get('idp', {}).values():
|
2015-11-09 10:07:06 +01:00
|
|
|
if not idp.get('attribute-mapping'):
|
2015-05-08 15:18:57 +02:00
|
|
|
idp['attribute-mapping'] = {}
|
2015-05-19 11:40:12 +02:00
|
|
|
for field in profile.get('fields', []):
|
|
|
|
attribute_name = field['name']
|
|
|
|
field_id = '_' + attribute_name
|
|
|
|
if field_id in profile_field_ids:
|
|
|
|
idp['attribute-mapping'][str(attribute_name)] = str(field_id)
|
2015-05-08 15:18:57 +02:00
|
|
|
pub.write_cfg()
|
|
|
|
|
2014-10-31 12:44:43 +01:00
|
|
|
def configure_authentication_methods(self, service, pub):
|
|
|
|
# look for an identity provider
|
|
|
|
idps = [x for x in self.all_services.get('services', []) if x.get('service-id') == 'authentic']
|
|
|
|
if not pub.cfg.get('identification'):
|
|
|
|
pub.cfg['identification'] = {}
|
|
|
|
methods = pub.cfg['identification'].get('methods', [])
|
|
|
|
if idps and not 'idp' in methods:
|
|
|
|
methods.append('idp')
|
|
|
|
elif not idps and not 'password' in methods:
|
|
|
|
methods.append('password')
|
|
|
|
pub.cfg['identification']['methods'] = methods
|
|
|
|
pub.write_cfg()
|
|
|
|
|
|
|
|
if not idps:
|
|
|
|
return
|
|
|
|
|
|
|
|
# initialize service provider side
|
|
|
|
if not pub.cfg.get('sp', {}).get('publickey'):
|
|
|
|
from qommon.ident.idp import MethodAdminDirectory
|
|
|
|
if not pub.cfg.get('sp'):
|
|
|
|
pub.cfg['sp'] = {}
|
|
|
|
spconfig = pub.cfg['sp']
|
|
|
|
spconfig['saml2_base_url'] = str(service.get('base_url')) + '/saml'
|
|
|
|
spconfig['saml2_providerid'] = spconfig['saml2_base_url'] + '/metadata'
|
|
|
|
MethodAdminDirectory().generate_rsa_keypair()
|
|
|
|
|
2015-08-18 09:59:09 +02:00
|
|
|
if not 'saml_identities' in pub.cfg:
|
|
|
|
pub.cfg['saml_identities'] = {}
|
|
|
|
|
|
|
|
if idps:
|
|
|
|
pub.cfg['saml_identities']['identity-creation'] = 'self'
|
|
|
|
|
2015-08-21 02:37:59 +02:00
|
|
|
# write down configuration to disk as it will get reloaded
|
|
|
|
# automatically and we don't want to lose our changes.
|
|
|
|
pub.write_cfg()
|
|
|
|
|
2014-10-31 12:44:43 +01:00
|
|
|
for idp in idps:
|
2015-08-18 09:55:37 +02:00
|
|
|
if not idp['base_url'].endswith('/'):
|
|
|
|
idp['base_url'] = idp['base_url'] + '/'
|
|
|
|
metadata_url = '%sidp/saml2/metadata' % idp['base_url']
|
2014-10-31 12:44:43 +01:00
|
|
|
try:
|
|
|
|
rfd = urllib2.urlopen(metadata_url)
|
|
|
|
except (urllib2.HTTPError, urllib2.URLError), e:
|
|
|
|
print >> sys.stderr, 'failed to get metadata URL', metadata_url, e
|
|
|
|
continue
|
|
|
|
except Exception, e:
|
|
|
|
print >> sys.stderr, 'failed to get metadata URL', metadata_url, e
|
|
|
|
continue
|
|
|
|
|
|
|
|
s = rfd.read()
|
|
|
|
(bfd, metadata_pathname) = tempfile.mkstemp('.metadata')
|
|
|
|
atomic_write(metadata_pathname, s)
|
|
|
|
|
|
|
|
from qommon.ident.idp import AdminIDPDir
|
|
|
|
admin_dir = AdminIDPDir()
|
|
|
|
key_provider_id = admin_dir.submit_new_remote(
|
|
|
|
metadata_pathname, None, metadata_url, None)
|
2015-04-17 19:20:12 +02:00
|
|
|
admin_attribute = service.get('variables', {}).get('admin-attribute')
|
|
|
|
if not admin_attribute:
|
|
|
|
admin_attribute = 'is_superuser=true'
|
2015-05-18 09:26:54 +02:00
|
|
|
else:
|
|
|
|
admin_attribute = unicode(admin_attribute).encode('utf-8')
|
2015-04-17 19:20:12 +02:00
|
|
|
admin_attribute_dict = dict([admin_attribute.split('=')])
|
|
|
|
pub.cfg['idp'][key_provider_id]['admin-attributes'] = admin_attribute_dict
|
2015-05-14 20:30:42 +02:00
|
|
|
pub.cfg['idp'][key_provider_id]['nameidformat'] = 'unspecified'
|
2015-08-18 09:59:09 +02:00
|
|
|
pub.cfg['saml_identities']['registration-url'] = str(
|
|
|
|
'%saccounts/register/' % idp['base_url'])
|
2015-08-21 02:37:59 +02:00
|
|
|
pub.write_cfg()
|
2014-10-31 12:44:43 +01:00
|
|
|
|
|
|
|
def get_instance_path(self, service):
|
|
|
|
parsed_url = urllib2.urlparse.urlsplit(service.get('base_url'))
|
|
|
|
instance_path = parsed_url.netloc
|
|
|
|
if parsed_url.path:
|
|
|
|
instance_path = '%s+' % parsed_url.path.replace('/', '+')
|
|
|
|
return instance_path
|
|
|
|
|
2015-06-08 16:24:25 +02:00
|
|
|
def configure_site_options(self, current_service, pub, ignore_timestamp=False):
|
2014-10-31 12:44:43 +01:00
|
|
|
# configure site-options.cfg
|
|
|
|
config = ConfigParser.RawConfigParser()
|
|
|
|
site_options_filepath = os.path.join(pub.app_dir, 'site-options.cfg')
|
|
|
|
if os.path.exists(site_options_filepath):
|
|
|
|
config.read(site_options_filepath)
|
|
|
|
|
2015-06-08 16:24:25 +02:00
|
|
|
if not ignore_timestamp:
|
|
|
|
try:
|
|
|
|
if config.get('hobo', 'timestamp') == self.all_services.get('timestamp'):
|
|
|
|
raise NoChange()
|
|
|
|
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
|
|
|
|
pass
|
2014-11-14 13:53:52 +01:00
|
|
|
|
2014-10-31 12:44:43 +01:00
|
|
|
if not 'hobo' in config.sections():
|
|
|
|
config.add_section('hobo')
|
|
|
|
config.set('hobo', 'timestamp', self.all_services.get('timestamp'))
|
|
|
|
|
2015-03-09 10:08:15 +01:00
|
|
|
variables = {}
|
2015-05-11 17:27:17 +02:00
|
|
|
api_secrets = {}
|
2015-03-09 10:08:15 +01:00
|
|
|
for service in self.all_services.get('services', []):
|
|
|
|
variables['%s_url' % service.get('slug')] = service.get('base_url')
|
2015-05-22 13:28:34 +02:00
|
|
|
if not service.get('secret_key'):
|
|
|
|
continue
|
2015-05-11 17:27:17 +02:00
|
|
|
domain = urlparse.urlparse(service.get('base_url')).netloc.split(':')[0]
|
|
|
|
api_secrets[domain] = hashlib.sha1(domain + service.get('secret_key')).hexdigest()
|
2014-10-31 12:44:43 +01:00
|
|
|
if self.all_services.get('variables'):
|
2015-03-09 10:08:15 +01:00
|
|
|
for key, value in self.all_services.get('variables').items():
|
|
|
|
variables[key] = value
|
2015-03-18 18:39:48 +01:00
|
|
|
for key, value in current_service.get('variables', {}).items():
|
|
|
|
variables[key] = value
|
2015-03-09 10:08:15 +01:00
|
|
|
|
|
|
|
if variables:
|
2014-10-31 12:44:43 +01:00
|
|
|
if not 'variables' in config.sections():
|
|
|
|
config.add_section('variables')
|
2015-03-09 10:08:15 +01:00
|
|
|
for key, value in variables.items():
|
2015-03-09 18:15:28 +01:00
|
|
|
key = unicode(key).encode('utf-8')
|
|
|
|
value = unicode(value).encode('utf-8')
|
2014-10-31 12:44:43 +01:00
|
|
|
config.set('variables', key, value)
|
|
|
|
|
2015-05-11 17:27:17 +02:00
|
|
|
if not 'api-secrets' in config.sections():
|
|
|
|
config.add_section('api-secrets')
|
|
|
|
for key, value in api_secrets.items():
|
|
|
|
config.set('api-secrets', key, value)
|
|
|
|
|
2015-04-20 15:29:23 +02:00
|
|
|
if 'options' not in config.sections():
|
2015-04-20 14:27:56 +02:00
|
|
|
config.add_section('options')
|
2016-01-11 12:41:47 +01:00
|
|
|
|
|
|
|
# add known services
|
2015-04-20 14:27:56 +02:00
|
|
|
for service in self.all_services.get('services', []):
|
2016-01-11 12:41:47 +01:00
|
|
|
if service.get('service-id') == 'fargo':
|
|
|
|
config.set('options', 'fargo_url', service.get('base_url'))
|
|
|
|
if service.get('service-id') == 'welco':
|
|
|
|
config.set('options', 'welco_url', service.get('base_url'))
|
2015-04-20 14:27:56 +02:00
|
|
|
|
2015-06-08 16:25:29 +02:00
|
|
|
try:
|
|
|
|
portal_agent_url = config.get('variables', 'portal_agent_url')
|
|
|
|
except ConfigParser.NoOptionError:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
if portal_agent_url.endswith('/'):
|
|
|
|
portal_agent_url = portal_agent_url.rstrip('/')
|
|
|
|
extra_head = '''<script src="%s/__services.js"></script>'''\
|
|
|
|
'''<script src="%s/static/js/publik.js"></script>''' % (
|
|
|
|
portal_agent_url, portal_agent_url)
|
|
|
|
config.set('options', 'backoffice_extra_head', extra_head)
|
|
|
|
|
2014-10-31 12:44:43 +01:00
|
|
|
with open(site_options_filepath, 'wb') as site_options:
|
|
|
|
config.write(site_options)
|
2014-06-17 17:04:40 +02:00
|
|
|
|
2014-10-31 14:06:44 +01:00
|
|
|
def configure_sql(self, service, pub):
|
|
|
|
if not pub.cfg.get('postgresql'):
|
|
|
|
return
|
|
|
|
|
|
|
|
if not pub.has_site_option('postgresql'):
|
|
|
|
return
|
|
|
|
|
|
|
|
import psycopg2
|
|
|
|
import psycopg2.errorcodes
|
|
|
|
|
|
|
|
# determine database name using the instance path; if the template
|
|
|
|
# database name contained an underscore character, use the first part
|
|
|
|
# as a prefix to the database name
|
|
|
|
database_name = pub.cfg['postgresql'].get('database', 'wcs')
|
|
|
|
domain_table_name = self.get_instance_path(service).replace(
|
|
|
|
'-', '_').replace('.', '_').replace('+', '_')
|
|
|
|
if not domain_table_name in database_name:
|
|
|
|
database_name = '%s_%s' % (database_name.split('_')[0], domain_table_name)
|
|
|
|
|
|
|
|
postgresql_cfg = {}
|
|
|
|
for k, v in pub.cfg['postgresql'].items():
|
|
|
|
if v:
|
|
|
|
postgresql_cfg[k] = v
|
|
|
|
try:
|
|
|
|
pgconn = psycopg2.connect(**postgresql_cfg)
|
2015-09-14 12:30:27 +02:00
|
|
|
except psycopg2.Error as e:
|
2014-10-31 14:06:44 +01:00
|
|
|
print >> sys.stderr, 'failed to connect to postgresql (%s)' % \
|
|
|
|
psycopg2.errorcodes.lookup(e.pgcode)
|
|
|
|
return
|
|
|
|
|
|
|
|
pgconn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
|
|
|
|
cur = pgconn.cursor()
|
|
|
|
new_database = True
|
|
|
|
try:
|
|
|
|
cur.execute('''CREATE DATABASE %s''' % database_name)
|
|
|
|
except psycopg2.Error as e:
|
|
|
|
if e.pgcode == psycopg2.errorcodes.DUPLICATE_DATABASE:
|
|
|
|
new_database = False
|
|
|
|
else:
|
|
|
|
print >> sys.stderr, 'failed to create database (%s)' % \
|
|
|
|
psycopg2.errorcodes.lookup(e.pgcode)
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
cur.close()
|
|
|
|
|
2015-05-12 10:27:18 +02:00
|
|
|
postgresql_cfg['database'] = database_name
|
|
|
|
pub.cfg['postgresql'] = postgresql_cfg
|
2014-10-31 14:06:44 +01:00
|
|
|
pub.write_cfg()
|
|
|
|
|
|
|
|
if not new_database:
|
|
|
|
return
|
|
|
|
|
|
|
|
cmd = [sys.argv[0]]
|
|
|
|
if self.base_options.configfile:
|
|
|
|
cmd.extend(['-f', self.base_options.configfile])
|
|
|
|
cmd.append('convert-to-sql')
|
|
|
|
for param in ('database', 'user', 'password', 'host', 'port'):
|
|
|
|
if postgresql_cfg.get(param):
|
|
|
|
if param == 'database':
|
|
|
|
cmd.append('--dbname')
|
|
|
|
else:
|
|
|
|
cmd.append('--' + param)
|
|
|
|
cmd.append(str(postgresql_cfg.get(param)))
|
|
|
|
cmd.append(str(self.get_instance_path(service)))
|
|
|
|
subprocess.call(cmd)
|
|
|
|
|
|
|
|
|
2014-06-17 17:04:40 +02:00
|
|
|
# register the command so the wcs ctl dispatcher exposes it as 'hobo_deploy'
CmdCheckHobos.register()
|