add publik-move to archives
This commit is contained in:
parent
48641cd9c1
commit
9748a225a5
|
@ -0,0 +1,87 @@
|
|||
[converted from https://dev.entrouvert.org/projects/sysadmin/wiki/D%C3%A9placement_Publik_Mutualise with html2text]
|
||||
|
||||
****** Déplacement Publik Mutualisé¶ ******
|
||||
* Déplacement_Publik_Mutualisé
|
||||
o Procédure
|
||||
# Avant
|
||||
# Pendant
|
||||
|
||||
Scripts de déploiement: git://git.entrouvert.org:publik-move.git
|
||||
* list-services.py doit être sur le vieux hobo
|
||||
|
||||
Utilisation du script :
|
||||
* publik-cluster-copy update : interroge le vieux hobo et stocke la liste
|
||||
des tenants et des services
|
||||
* publik-cluster-copy sync : copie les fichiers et les dumps des bases de
|
||||
données dans /srv/ceph/migration
|
||||
* publik-cluster-copy ls : liste les tenants
|
||||
* publik-cluster-copy take --tenant demarches-
|
||||
chateauroux.test.entrouvert.org : copie, adapte et pose les fichiers et
|
||||
les db
|
||||
* le flag --verbose permet de suivre les commandes exécutées
|
||||
|
||||
Pour une instance au-quotidien, procédure identique, nécessite le flag --auquo
|
||||
et --tenant :
|
||||
* ./publik-cluster-copy sync --auquo --tenant demo.test.au-quotidien.com --
|
||||
verbose
|
||||
* ./publik-cluster-copy take --auquo --tenant demo.test.au-quotidien.com --
|
||||
wcsdbname wcs_demo_test_au_quotidien_com --verbose
|
||||
|
||||
***** Procédure¶ *****
|
||||
**** Avant¶ ****
|
||||
1. Pré-synchroniser tous les tenants: ./publik-cluster-copy sync
|
||||
2. parcourir et dupliquer /etc/app/settings.d/ /etc/cron.hourly etc.
|
||||
* ssh app.test.entrouvert.org ls -l /etc/cron.hourly /etc/
|
||||
cron.daily /etc/cron.d
|
||||
* ssh app.test.entrouvert.org ls -l /etc/app/settings.d
|
||||
1. Préparer les prochains fichiers de zone DNS
|
||||
* à coups de :537,547 s/E[ \t]*.*/E tst/ ou de :537,547 s/E
|
||||
[ \t]*.*/E prod/
|
||||
* ne pas oublier les sous-domaines de au-quotidien.com!
|
||||
1. Vérifier si les certificats sont dispos sur le nouveau serveur
|
||||
2. Anticiper des dossiers posés sur des partitions différentes (e.g. dossier
|
||||
uploads symlink vers /var/lib/wcs-au-quotidien-uploads/)
|
||||
3. Si nécessaire convertir wcs vers postgresql: CREATE DATABASE
|
||||
wcs_demo_test_au_quotidien_com WITH OWNER="wcs" TEMPLATE=template0
|
||||
LC_COLLATE='fr_FR.utf8' LC_CTYPE='fr_FR.utf8'; ensuite sudo -u wcs wcs-
|
||||
manage convert_to_sql -d demo.test.au-quotidien.com --database
|
||||
wcs_demo_test_au_quotidien_com
|
||||
|
||||
**** Pendant¶ ****
|
||||
1. couper les cron wcs sur l'ancien et le nouveau:
|
||||
ssh wcs.test.entrouvert.org sudo sed 's/^\*/##\*/' -i /etc/cron.d/wcs
|
||||
ssh wcs.node1.test.saas.entrouvert.org sudo sed 's/^\*/##\*/' -i /etc/cron.d/wcs
|
||||
# ou
|
||||
ssh wcs.entrouvert.org sudo sed 's/^\*/##\*/' -i /etc/cron.d/wcs
|
||||
ssh wcs.node1.prod.saas.entrouvert.org sudo sed 's/^\*/##\*/' -i /etc/cron.d/wcs
|
||||
o Vérifier que cron ne s'exécute plus avant de continuer : ssh wcs.
|
||||
{test|prod}.entrouvert.org ps waux | grep wcs-manage
|
||||
2. couper les vhost nginx: script qui déploie des 503 là où c'est
|
||||
nécessaire:
|
||||
o sur gallinette/chicon, mettre à jour: /root/000_503-service-
|
||||
unavailable
|
||||
o lancer /root/diffuse-nginx-config
|
||||
3. sur le nouvel hyperviseur (e.g. node1.test.saas.entrouvert.org)
|
||||
$ sudo su -
|
||||
# cd /root/publik-move
|
||||
# git pull
|
||||
# ./publik-cluster-copy sync --tenant hobo-foobar.tst.entrouvert.org # prend du temps
|
||||
# ./publik-cluster-copy take --tenant hobo-foobar.tst.entrouvert.org
|
||||
4. invalider le client sur gallinette/chicon: tenant => tenant-migrated
|
||||
o ssh gallinette.entrouvert.org sudo /root/invalide-tenants-en-503
|
||||
5. dans le cas de tenant avec dossier uploads symlink vers /var/lib/wcs-au-
|
||||
quotidien-uploads/<site>, synchroniser ce dossier à la main
|
||||
upl=demarches.e-service.seine-et-marne.fr
|
||||
test -L /srv/ceph/lib/wcs/${upl}/uploads && echo "HAAAAA COPY"
|
||||
rm /srv/ceph/lib/wcs/${upl}/uploads
|
||||
rsync -apv --info=progress2 auquo.entrouvert.org:/var/lib/wcs-au-quotidien-
|
||||
uploads/$upl /srv/ceph/migration/wcs-au-quotidien-uploads/
|
||||
mv /srv/ceph/migration/wcs-au-quotidien-uploads/${upl}/uploads /srv/ceph/lib/wcs/${upl}/
|
||||
6. substituer le fichier de zone DNS, rndc reload
|
||||
7. attendre 2 minutes (ttl DNS) et tester le plus possible
|
||||
8. relancer les crons wcs sur l'ancien et le nouveau:
|
||||
ssh wcs.test.entrouvert.org sudo sed 's/^##\*/\*/' -i /etc/cron.d/wcs
|
||||
ssh wcs.node1.test.saas.entrouvert.org sudo sed 's/^##\*/\*/' -i /etc/cron.d/wcs
|
||||
# ou
|
||||
ssh wcs.entrouvert.org sudo sed 's/^##\*/\*/' -i /etc/cron.d/wcs
|
||||
ssh wcs.node1.prod.saas.entrouvert.org sudo sed 's/^##\*/\*/' -i /etc/cron.d/wcs
|
|
@ -0,0 +1,46 @@
|
|||
#! /usr/bin/env python
"""Dump every schema of every local PostgreSQL database to per-schema tar files.

Meant to be run as the postgres user on the origin database host; the
resulting /var/lib/postgresql/migration/dump/<db>/<schema>.tar archives are
later rsynced away by publik-cluster-copy.
"""

import os
import sys
import subprocess
import datetime

try:
    import psycopg2
except ImportError:
    # No driver on this host: nothing we can do, exit quietly.
    sys.exit(0)

# List the non-template databases of the local cluster.
conn = psycopg2.connect(dbname='postgres')
cursor = conn.cursor()
cursor.execute('SELECT datname FROM pg_database WHERE datistemplate = false')
databases = [x[0] for x in cursor.fetchall()]
conn.close()

for database in databases:
    if database == 'postgres':
        continue

    conn = psycopg2.connect(dbname=database)
    cursor = conn.cursor()
    cursor.execute('SELECT schema_name FROM information_schema.schemata')
    schemas = [x[0] for x in cursor.fetchall()]

    backups_dir = os.path.join('/var/lib/postgresql/migration/dump', database)
    if not os.path.exists(backups_dir):
        # 0o770 instead of the Python 2-only 0770 literal; chmod afterwards
        # because the makedirs mode is filtered by the umask.
        os.makedirs(backups_dir, mode=0o770)
    os.chmod(backups_dir, 0o770)

    for schema in schemas:
        if schema.startswith('pg_') or schema == 'information_schema':
            continue
        backup_filename = os.path.join(backups_dir, '%s.tar' % schema)
        # Dump to a .new file first so a failed/interrupted pg_dump never
        # clobbers the previous good archive; only promote it on success.
        status = subprocess.call(['pg_dump', '-n', schema, '-F', 'tar',
                                  '-f', backup_filename + '.new', database])
        if status != 0:
            continue
        if os.path.exists(backup_filename):
            os.unlink(backup_filename)
        os.rename(backup_filename + '.new', backup_filename)

    conn.close()
|
|
@ -0,0 +1,110 @@
|
|||
import urlparse
|
||||
from hobo.environment.models import AVAILABLE_SERVICES
|
||||
from hobo.multitenant.middleware import TenantMiddleware
|
||||
from django.db import connection
|
||||
|
||||
# Map of wcs tenant hostname -> name of its PostgreSQL database on the old
# cluster.  An empty string presumably means the tenant has not been
# converted to SQL yet (file-based storage) -- TODO confirm.  Most entries
# follow the wcs_<hostname with dots/dashes replaced by underscores> scheme;
# a few legacy tenants use ad-hoc names (e.g. 'wcs-alfortville', 'wcs_metz',
# 'wcs_venissieux' shared by two hostnames).
bases_wcs = {
    'calvados.test.entrouvert.org': 'wcs_calvados_test_entrouvert_org',
    'cdg59.test.au-quotidien.com': '',
    'cud.au-quotidien.com': '',
    'demarches-alfortville.test.entrouvert.org': 'wcs-alfortville',
    'demarches-amiens.test.entrouvert.org': 'wcs_demarches_amiens_test_entrouvert_org',
    'demarches-arles.test.entrouvert.org': 'wcs_demarches_arles_test_entrouvert_org',
    'demarches-atreal.test.entrouvert.org': 'wcs_demarches_atreal_test_entrouvert_org',
    'demarches-auch.test.entrouvert.org': 'wcs_demarches_auch_test_entrouvert_org',
    'demarches-blois.test.entrouvert.org': 'wcs_demarches_blois_test_entrouvert_org',
    'demarches-cd22.test.entrouvert.org': 'wcs_cd22',
    'demarches-cd44.test.entrouvert.org': 'wcs_demarches_cd44_test_entrouvert_org',
    'demarches-chateauroux.test.entrouvert.org': 'wcs_demarches_chateauroux_test_entrouvert_org',
    'demarches-clisson.test.entrouvert.org': 'wcs_demarches_clisson_test_entrouvert_org',
    'demarches-cnil.test.entrouvert.org': 'wcs_demarches_cnil_test_entrouvert_org',
    'demarches-csma.test.entrouvert.org': 'wcs_demarches_csma_test_entrouvert_org',
    'demarches-departement06.test.entrouvert.org': 'wcs_departement06',
    'demarches-dreux.test.entrouvert.org': 'wcs_demarches_dreux_test_entrouvert_org',
    'demarches-e-service-seine-et-marne.test.entrouvert.org': 'wcs_demarches_e_service_seine_et_marne_test_entrouvert_org',
    'demarches-fsb.test.au-quotidien.com': 'wcs_demarches_fsb_test_au_quotidien_com',
    'demarches-gorges.test.entrouvert.org': 'wcs_demarches_gorges_test_entrouvert_org',
    'demarches-grenoble.test.entrouvert.org': 'wcs_demarches_grenoble_test_entrouvert_org',
    'demarches-haute-goulaine.test.entrouvert.org': 'wcs_demarches_haute_goulaine_test_entrouvert_org',
    'demarches-hautes-alpes.test.entrouvert.org': 'wcs_demarches_hautes_alpes_test_entrouvert_org',
    'demarches-implicit.test.entrouvert.org': 'wcs_demarches_implicit_test_entrouvert_org',
    'demarches-lareunion.test.entrouvert.org': 'wcs_demarches_lareunion_test_entrouvert_org',
    'demarches-lenord.test.entrouvert.org': 'wcs_demarches_lenord_test_entrouvert_org',
    'demarches-lenord.test.entrouvert.org.DELETED.invalid': 'wcs_demarches_lenord_test_entrouvert_org',
    'demarches-lozere.test.entrouvert.org': 'wcs_demarches_lozere_test_entrouvert_org',
    'demarches-matrik.test.entrouvert.org': 'wcs_demarches_matrik_test_entrouvert_org',
    'demarches-mauguio-carnon.test.entrouvert.org': 'wcs_demarches_mauguio_carnon_test_entrouvert_org',
    'demarches-metz.test.entrouvert.org': 'wcs_metz',
    'demarches-meudon.test.entrouvert.org': 'wcs_demarches_meudon_test_entrouvert_org',
    'demarches-meyzieu.test.au-quotidien.com': 'demarches_meyzieu_test_au_quotidien_com',
    'demarches-montsac-sicoval.test.entrouvert.org': 'wcs_demarches_montsac_sicoval_test_entrouvert_org',
    'demarches-nancy.test.entrouvert.org': 'wcs_demarches_nancy_test_entrouvert_org',
    'demarches-nancy.test.entrouvert.org.invalid': 'wcs_demarches_nancy_test_entrouvert_org',
    'demarches-orleans.test.entrouvert.org': 'demarches_orleans_test_entrouvert_org',
    'demarches-planesou-sicoval.test.entrouvert.org': 'wcs_demarches_planesou_sicoval_test_entrouvert_org',
    'demarches-publik-light.test.entrouvert.org': 'wcs_demarches_publik_light_test_entrouvert_org',
    'demarches-quimper.test.entrouvert.org': 'wcs_demarches_quimper_test_entrouvert_org',
    'demarches-rochefort.test.entrouvert.org.DELETED.invalid': 'wcs_demarches_rochefort_test_entrouvert_org',
    'demarches-rouen.test.entrouvert.org': 'wcs_demarches_rouen_test_entrouvert_org',
    'demarches-saint-chamond.test.entrouvert.org': 'wcs_venissieux',
    'demarches-saint-lo.test.entrouvert.org': 'wcs_demarches_saint_lo_test_entrouvert_org',
    'demarches-saone-et-loire.test.entrouvert.org': 'wcs_demarches_saone_et_loire_test_entrouvert_org',
    'demarches-sicoval.test.entrouvert.org': 'wcs_demarches_sicoval_test_entrouvert_org',
    'demarches-somme.test.entrouvert.org': 'wcs_demarches_somme_test_entrouvert_org',
    'demarches-strasbourg.test.entrouvert.org': 'wcs_demarches_strasbourg_test_entrouvert_org',
    'demarches-toulouse.test.entrouvert.org': 'wcs_demarches_toulouse_test_entrouvert_org',
    'demarches-tours.test.entrouvert.org': 'wcs_demarches_tours_test_entrouvert_org',
    'demarches-up.test.entrouvert.org': 'wcs_demarches_up_test_entrouvert_org',
    'demarches-validation.test.entrouvert.org': 'wcs_demarches_validation_test_entrouvert_org',
    'demarches-venissieux.test.entrouvert.org': 'wcs_venissieux',
    'demarches-vincennes.test.au-quotidien.com': 'wcs_demarches_vincennes_test_au_quotidien_com',
    'demarches2016.alfortville.fr': 'wcs_demarches2016_alfortville_fr',
    'demo-calvados.test.au-quotidien.com': '',
    'demo-cdg59.test.au-quotidien.com': '',
    'demo.test.au-quotidien.com': '',
    'e-megalis.test.au-quotidien.com': '',
    'echirolles.dev.au-quotidien.com': '',
    'eservice-cdg59.test.au-quotidien.com': '',
    'eservices-cch.test.entrouvert.org': 'wcs_eservices_cch_test_entrouvert_org',
    'fondettes.test.au-quotidien.com': 'wcs_fondettes_test_au_quotidien_com',
    'fsb.test.au-quotidien.com.invalid': 'wcs_demarches_fsb_test_au_quotidien_com',
    'graphviz.dev.au-quotidien.com': '',
    'greville-hague.test.entrouvert.org': 'wcs_greville_hague_test_entrouvert_org',
    'jerome.au-quotidien.com': '',
    'jerome2.test.au-quotidien.com': '',
    'jobourg.test.entrouvert.org': 'wcs_jobourg_test_entrouvert_org',
    'koshie.test.au-quotidien.com': '',
    'matrice.test.au-quotidien.com': '',
    'meaux.test.au-quotidien.com': 'wcs-meaux',
    'ng.dev.au-quotidien.com': '',
    'omonville-la-rogue.test.entrouvert.org': 'wcs_omonville_la_rogue_test_entrouvert_org',
    'orleans.test.au-quotidien.com': '',
    'paiement.test.au-quotidien.com': '',
    'poissy.test.au-quotidien.com': '',
    'sitiv.test.au-quotidien.com': '',
    'test.test.au-quotidien.com': '',
    'tonneville.test.entrouvert.org': 'wcs_tonneville_test_entrouvert_org',
    'urville-nacqueville.test.entrouvert.org': 'wcs_urville_nacqueville_test_entrouvert_org'
}
|
||||
# Runs under hobo-manage tenant_command runscript (Python 2) once per tenant.
# Emits one "'<tenant>': [ {service dicts...} ]," fragment per tenant; the
# collected output is wrapped in braces and parsed back into a dict by
# publik-cluster-copy's update().  Trailing commas on the print statements
# keep each tenant's fragment on a single line (Python 2 print semantics).
tenant = connection.get_tenant()
# print '%s hobo-balancer' % tenant.domain_url
# print 'hobo %s %s' % (tenant.domain_url, tenant.schema_name)

print "'%s': [" % tenant.domain_url,
print "{'service': 'hobo', 'hostname': '%s', 'schema': '%s'}," % (tenant.domain_url, tenant.schema_name),

for service in AVAILABLE_SERVICES:
    for site in service.objects.all():
        servicename = site.Extra.service_id
        # hostname without any :port suffix
        hostname = urlparse.urlsplit(site.base_url).netloc.split(':')[0]
        if site.secondary:
            #print '# (secondary) %s %s-balancer' % (hostname, servicename)
            continue
        if servicename == 'hobo':
            #print '# (hobo) %s %s-balancer' % (hostname, servicename)
            continue
        schema = TenantMiddleware.hostname2schema(hostname)
        if servicename == 'wcs':
            # wcs uses its own database names, looked up in the static map;
            # raises KeyError for a hostname missing from bases_wcs
            schema = bases_wcs[hostname]
        print "{'service': '%s', 'hostname': '%s', 'schema': '%s'}," % (servicename, hostname, schema),
print '],'
|
|
@ -0,0 +1,227 @@
|
|||
#!/usr/bin/python3
|
||||
"""
|
||||
publik-cluster-copy tetragone.entrouvert.org welco --originbase /srv/backup/rsync/welco.test.entrouvert.org
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import pickle
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
parser = argparse.ArgumentParser()
# update: query the old hobo and cache the tenant/service list
# sync:   rsync files, nginx config and database dumps from the old cluster
# ls:     list the cached tenants
# take:   import files and databases for one tenant on the new cluster
parser.add_argument('action', choices=['sync', 'update', 'ls', 'take'])
parser.add_argument('--tenant')  # comma-separated tenant hostname(s)
parser.add_argument('--cached', action='store_true')  # update: reuse the cached list
parser.add_argument('--verbose', action='store_true')  # echo executed commands
parser.add_argument('--originbase', default='')  # extra path prefix on the origin host (e.g. a backup mount)
parser.add_argument('--auquo', action='store_true')  # au-quotidien wcs-only mode (requires --tenant)
parser.add_argument('--wcsdbname')  # with --auquo: override the derived wcs db/schema name
args = parser.parse_args()
|
||||
|
||||
# Environment is the host fqdn minus its first label,
# e.g. node1.test.saas.entrouvert.org -> test.saas.entrouvert.org.
environment = '.'.join(socket.getfqdn().split('.')[1:])
base = '/srv/nfs'
m = '%s/migration' % base  # staging area for rsynced files and dumps
data_file = '%s/data.p' % m  # pickled {tenant: [service dicts]} cache, see update()
if not os.path.isdir(m):
    os.mkdir(m)
if not os.path.isfile(data_file):
    # Seed the cache with an empty dict.  The original opened the file
    # twice (a truncating text handle plus an unclosed binary one); a
    # single binary handle is all that is needed.
    with open(data_file, 'wb') as fh:
        pickle.dump({}, fh)

with open('%s/etc/wcs/password' % base) as fh:
    wcs_password = fh.read().strip()

with open(data_file, 'rb') as fh:
    data = pickle.load(fh)
|
||||
|
||||
# Each new saas environment maps onto exactly one old environment.
_OLD_ENVIRONMENTS = {
    'test.saas.entrouvert.org': 'test.entrouvert.org',
    'prod.saas.entrouvert.org': 'entrouvert.org',
}
if environment not in _OLD_ENVIRONMENTS:
    raise Exception('unknown environment')
old_environment = _OLD_ENVIRONMENTS[environment]
|
||||
|
||||
|
||||
# Publik bricks to migrate.  'service' defaults to the brick name and
# 'hostname' (the vhost prefix on the old environment) defaults to it too.
briques = [
    {'name': 'authentic', 'service': 'authentic2-multitenant'},
    {'name': 'bijoe', 'hostname': 'bi'},
    {'name': 'chrono'}, {'name': 'combo'}, {'name': 'corbo'}, {'name': 'fargo'},
    {'name': 'hobo'}, {'name': 'mandayejs'}, {'name': 'passerelle'},
    {'name': 'wcs'}, {'name': 'welco'},
]

for brique in briques:
    brique.setdefault('service', brique['name'])
    host = brique.get('hostname', brique['name'])
    # fully-qualified origin host, and the rsync source prefix on it
    brique['origin'] = '%s.%s' % (host, old_environment)
    brique['originbase'] = '%s.%s:%s' % (host, old_environment, args.originbase)

briques_list = [b['name'] for b in briques]
|
||||
|
||||
|
||||
def run(cmd):
    """Run *cmd* through the shell, echoing it (and its output) with --verbose.

    Returns the CompletedProcess; re-raises subprocess.CalledProcessError
    after printing the captured output so remote/rsync failures are
    diagnosable.
    """
    if args.verbose:
        print(cmd)
    try:
        # shell=True: commands are assembled from internal configuration,
        # never from untrusted input.
        o = subprocess.run(cmd, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if args.verbose and o.stdout:
            print(o.stdout.decode())
    except subprocess.CalledProcessError as e:
        # decode so diagnostics are readable text, not bytes reprs
        print(e.stdout.decode() if e.stdout else e.stdout)
        print(e.stderr.decode() if e.stderr else e.stderr)
        raise
    return o
|
||||
|
||||
|
||||
def container(ctn, cmd):
    """Run *cmd* inside the systemd container *ctn*, waiting for completion."""
    full_cmd = 'systemd-run --machine %s --wait %s' % (ctn, cmd)
    return run(full_cmd)
|
||||
|
||||
|
||||
def get_nginx_sites(name, originbase, **kwargs):
    """Fetch a brick's nginx vhost configuration from the origin host."""
    target = '%s/migration/%s/nginx' % (base, name)
    if not os.path.isdir(target):
        run('mkdir -p %s' % target)
    for subdir in ('sites-available', 'sites-enabled'):
        run('rsync -a %s/etc/nginx/%s %s' % (originbase, subdir, target))
|
||||
|
||||
|
||||
def _get_brique_files(originbase, service, relpath, mp):
    """rsync /var/lib/<relpath> from the origin into *mp*, skipping caches."""
    skipped = '--exclude "*.invalid/" --exclude whoosh_index/ --exclude form_tokens/'
    command = 'rsync -av --info=progress %s --delete %s/var/lib/%s %s' % (
        skipped, originbase, relpath, mp)
    run(command)
|
||||
|
||||
|
||||
def get_brique_files(name, service, originbase, **kwargs):
    """Copy a brick's /var/lib payload from the origin into the staging area.

    For wcs with an explicit --tenant list, only the wcs directories of the
    listed tenants' wcs services (as recorded by update()) are copied;
    otherwise the brick's whole directory is synced.
    """
    mp = '%s/migration/%s' % (base, name)
    if not os.path.isdir(mp):
        run('mkdir -p %s' % mp)
    if service == 'wcs' and args.tenant:
        for tenant in args.tenant.split(','):
            for e in data['tenants'][tenant]:
                if e['service'] == 'wcs':
                    n = e['hostname']
                    print(n)
                    # per-tenant wcs directories all live under wcs/wcs
                    mp = '%s/migration/wcs/wcs' % base
                    _get_brique_files(originbase, service, 'wcs/%s' % n, mp)
    else:
        _get_brique_files(originbase, service, service, mp)
|
||||
|
||||
|
||||
def get_dumps(name, origin, originbase, **kwargs):
    """Trigger dump-schemas on the origin host, then fetch the resulting dumps."""
    staging = '%s/migration/%s' % (base, name)
    # push the dump script to the origin and run it as postgres there
    run('scp dump-schemas %s:/tmp' % origin)
    run('ssh %s sudo -u postgres python /tmp/dump-schemas' % origin)
    run('rsync -a --delete %s/var/lib/postgresql/migration/dump %s' % (originbase, staging))
|
||||
|
||||
|
||||
def import_schema(schema, db, tar):
    """Restore one schema *tar* into *db* on the new database container, replacing any existing schema."""
    db_ctn = 'database.node1.%s' % environment
    drop_cmd = '/usr/bin/sudo -u postgres /usr/bin/psql -c "drop schema if exists %s cascade;" %s' % (schema, db)
    restore_cmd = '/usr/bin/sudo -u postgres LANG=C /usr/bin/pg_restore -d %s %s' % (db, tar)
    container(db_ctn, drop_cmd)
    container(db_ctn, restore_cmd)
|
||||
|
||||
|
||||
def import_database(db, tar):
    """Recreate database *db* (owner wcs, fr_FR.utf8 locale) and restore *tar* into it."""
    commands = [
        '/usr/bin/sudo -u postgres /usr/bin/dropdb --if-exists %s' % db,
        '/usr/bin/sudo -u postgres /usr/bin/createdb %s --owner wcs --template="template0" --lc-collate=fr_FR.utf8 --lc-ctype=fr_FR.utf8;' % db,
        '/usr/bin/sudo -u postgres LANG=C /usr/bin/pg_restore -d %s %s' % (db, tar),
    ]
    for command in commands:
        container('database.node1.%s' % environment, command)
|
||||
|
||||
|
||||
def take(tenant=args.tenant):
    """Import every service of *tenant* (default: --tenant) on the new cluster."""
    for entry in data['tenants'][tenant]:
        take_tenant(entry['hostname'], entry['service'], entry['schema'])
|
||||
|
||||
|
||||
def take_tenant(hostname, service, schema):
    """Import one service instance on the new cluster.

    Restores its database (a dedicated database for wcs, a single schema
    for the other bricks) and moves its files from the staging area to
    their final location under /srv/nfs.
    """

    if service not in briques_list:
        print('Unknown service %s' % service)
        return

    # authentic is the only brick whose daemon/db names differ from its name
    if service == 'authentic':
        daemon = 'authentic2-multitenant'
        db = 'authentic2_multitenant'
    else:
        daemon = service
        db = service

    # the dumps were rsynced as root; give them to postgres before restoring
    ctn = 'database.node1.%s' % environment
    container(ctn, '/bin/chown -R postgres %s/migration/%s/dump' % (base, service))

    if service == 'wcs':
        # wcs gets one dedicated database per tenant, not a schema
        dbname = 'wcs_%s' % hostname.replace('-', '_').replace('.', '_')
        import_database(dbname, '%s/migration/wcs/dump/%s/public.tar' % (base, schema))
        orig = '%s/migration/wcs/wcs/%s' % (base, hostname)
        dest = '%s/lib/wcs/%s' % (base, hostname)
        # adapt_wcs(orig, dbname) is not called directly: config.pck needs
        # the python2 pickle, hence the external helper script
        run('./update-config-pck.py %s/config.pck %s' % (orig, dbname))
        # adapt wcs-olap.ini, configuration is defined globally #26627
        if os.path.isfile('%s/wcs-olap.ini' % orig):
            # quote the sed expression: unquoted, the embedded spaces split
            # it into several shell words and the command failed
            run("sed 's/^pg_dsn =/#pg_dsn =/' -i %s/wcs-olap.ini" % orig)
    else:
        import_schema(schema, db, '%s/migration/%s/dump/%s/%s.tar' % (base, service, db, schema))
        orig = '%s/migration/%s/%s/tenants/%s' % (base, service, daemon, hostname)
        dest = '%s/lib/%s/tenants/%s' % (base, daemon, hostname)

    if os.path.isdir(dest):
        run('rm -r %s' % dest)

    run('mv %s %s' % (orig, dest))
|
||||
|
||||
|
||||
def sync():
    """Copy files, nginx config and database dumps of every brick from the old cluster."""
    # connectivity check against every origin host before doing any work
    for brique in briques:
        run('ssh %s hostname' % brique.get('origin'))

    for brique in briques:
        print(brique['name'])
        get_nginx_sites(**brique)
        get_brique_files(**brique)
        get_dumps(**brique)
|
||||
|
||||
|
||||
def update():
    """Refresh the cached tenant/service map by querying the old hobo.

    The remote list-services.py prints dict-literal fragments (one per
    tenant); they are wrapped in braces and parsed back into a dict.
    """
    import ast

    if not args.cached:
        hobo = 'hobo.%s' % old_environment
        output = run('ssh %s sudo -u hobo hobo-manage tenant_command runscript /home/thomas/list-services.py --all-tenants -v0; ' % hobo)
        # literal_eval only accepts literals: unlike the original eval(),
        # output coming from the remote host cannot execute code here
        data['tenants'] = ast.literal_eval("{ %s }" % output.stdout.decode())
        with open(data_file, 'wb') as fh:
            pickle.dump(data, fh)
|
||||
|
||||
|
||||
def ls():
    """Print the hostname of every tenant known to the cache."""
    for tenant_name in data['tenants']:
        print(tenant_name)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # --auquo: wcs-only special case, handled here and exiting early --
    # presumably because au-quotidien instances are not registered in the
    # hobo tenant cache (TODO confirm).
    if args.auquo:
        if not args.tenant:
            raise(Exception('no tenant specified'))

        # db/schema name defaults to the mangled tenant hostname;
        # --wcsdbname overrides it for legacy database names
        if args.wcsdbname:
            schemaname = args.wcsdbname
        else:
            schemaname = args.tenant.replace('-', '_').replace('.', '_')
        origin = [b['origin'] for b in briques if b['service'] == 'wcs'][0]
        originbase = [b['originbase'] for b in briques if b['service'] == 'wcs'][0]

        if args.action == 'sync':
            mp = '%s/migration/wcs/wcs' % base
            _get_brique_files(originbase, 'wcs', 'wcs/%s' % args.tenant, mp)
            get_dumps('wcs', origin, originbase)

        elif args.action == 'take':
            take_tenant(args.tenant, 'wcs', schemaname)

        sys.exit(0)

    # normal mode: validate the requested tenants against the cache first
    if args.tenant:
        for tenant in args.tenant.split(','):
            if tenant not in data['tenants'].keys():
                raise(Exception('tenant not found'))

    # dispatch to the module-level function named after the action
    # (sync / update / ls / take)
    locals()[args.action]()
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/python2
|
||||
import pickle
|
||||
import sys
|
||||
|
||||
# wcs database password, read from the shared /srv/nfs mount
wcs_password = open('/srv/nfs/etc/wcs/password').read().strip()
|
||||
|
||||
def adapt_wcs(path, dbname, dbmaster='prod.saas.entrouvert.org.clusters.entrouvert.org'):
    """Point the wcs config.pck at *path* to database *dbname* on *dbmaster*.

    The pickle is rewritten in place with the new postgresql connection
    parameters (host, port, user, password, database).
    """
    dbname = dbname.strip()
    with open(path, 'rb') as fh:
        cf = pickle.load(fh)
    if not 'postgresql' in cf:
        # tenant not converted to SQL: leave the file untouched.  The
        # original only printed and then crashed on the KeyError below.
        print('%s not a postgresql instance' % path)
        return
    cf['postgresql']['host'] = dbmaster
    cf['postgresql']['port'] = '5432'
    cf['postgresql']['user'] = 'wcs'
    cf['postgresql']['password'] = wcs_password
    cf['postgresql']['database'] = dbname

    with open(path, 'wb') as fh:
        pickle.dump(cf, fh)
|
||||
|
||||
# CLI entry point: update-config-pck.py <path/to/config.pck> <dbname>
adapt_wcs(sys.argv[1], sys.argv[2])
|
Reference in New Issue