# hobo - portal to configure and deploy applications
# Copyright (C) 2015-2022 Entr'ouvert
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
2022-08-09 15:28:17 +02:00
|
|
|
|
import io
|
|
|
|
|
import json
|
2022-10-30 11:20:24 +01:00
|
|
|
|
import os
|
2022-10-30 12:54:13 +01:00
|
|
|
|
import sys
|
2022-08-09 15:28:17 +02:00
|
|
|
|
import tarfile
|
2022-10-31 15:39:06 +01:00
|
|
|
|
import traceback
|
2022-01-08 16:07:08 +01:00
|
|
|
|
import urllib.parse
|
|
|
|
|
|
|
|
|
|
from django.conf import settings
|
2022-10-30 11:20:24 +01:00
|
|
|
|
from django.core.files.base import ContentFile
|
2022-10-30 12:54:13 +01:00
|
|
|
|
from django.db import connection, models
|
2023-03-29 12:10:39 +02:00
|
|
|
|
from django.db.models import JSONField
|
2022-01-08 16:07:08 +01:00
|
|
|
|
from django.utils.text import slugify
|
|
|
|
|
from django.utils.timezone import now
|
2023-03-29 12:10:39 +02:00
|
|
|
|
from django.utils.translation import gettext_lazy as _
|
2022-01-08 16:07:08 +01:00
|
|
|
|
|
|
|
|
|
from hobo.environment.utils import get_installed_services
|
|
|
|
|
|
|
|
|
|
from .utils import Requests
|
|
|
|
|
|
|
|
|
|
# Module-level HTTP client used for all inter-service API calls below.
requests = Requests()
|
|
|
|
|
|
|
|
|
|
|
2022-11-14 15:16:12 +01:00
|
|
|
|
def get_object_types():
    """Collect exportable object types from all known, supported services.

    Iterates settings.KNOWN_SERVICES, keeps only service types listed in
    Application.SUPPORTED_MODULES, skips services that are not installed
    or are secondary, then queries each remaining service's
    ``api/export-import/`` endpoint. Services whose request fails are
    silently skipped. Each returned object type dict is annotated with
    the service it came from under the ``'service'`` key.
    """
    collected = []
    known_services = getattr(settings, 'KNOWN_SERVICES', {})
    for service_id, services in known_services.items():
        if service_id not in Application.SUPPORTED_MODULES:
            continue
        # Map base URL -> installed service object, for quick lookup.
        installed = {x.get_base_url_path(): x for x in get_installed_services(types=[service_id])}
        for service in services.values():
            service_object = installed.get(service['url'])
            if service_object is None or service_object.secondary:
                continue
            response = requests.get(urllib.parse.urljoin(service['url'], 'api/export-import/'))
            if not response.ok:
                continue
            for object_type in response.json()['data']:
                object_type['service'] = service
                collected.append(object_type)
    return collected
|
|
|
|
|
|
|
|
|
|
|
2022-11-03 09:30:08 +01:00
|
|
|
|
class ApplicationError(Exception):
    """Base class for application-related errors.

    Keeps the historical ``msg`` attribute (read by AsyncJob.run below)
    while also initializing Exception properly, so that ``str(e)`` and
    ``e.args`` carry the message instead of being empty.
    """

    def __init__(self, msg):
        # Previously Exception.__init__ never received the message,
        # which made str(e) render as an empty string in logs.
        super().__init__(msg)
        self.msg = msg
|
|
|
|
|
|
|
|
|
|
|
2022-11-03 09:30:08 +01:00
|
|
|
|
class ScanError(ApplicationError):
    """Raised when fetching elements or scanning their dependencies fails."""

    pass
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class DeploymentError(ApplicationError):
    """Raised when declaring or deploying a bundle to a service fails."""

    pass
|
|
|
|
|
|
|
|
|
|
|
2023-02-20 16:34:04 +01:00
|
|
|
|
class UnlinkError(ApplicationError):
    """Raised when asking a service to unlink an application fails."""

    pass
|
|
|
|
|
|
|
|
|
|
|
2022-01-08 16:07:08 +01:00
|
|
|
|
class Application(models.Model):
    """A named application, made of elements hosted by known services.

    Elements are attached through Relation rows; an element added because
    another element depends on it is flagged ``auto_dependency`` on its
    relation.
    """

    # Service types whose elements can be part of an application.
    SUPPORTED_MODULES = ('wcs',)

    name = models.CharField(max_length=100, verbose_name=_('Name'))
    slug = models.SlugField(max_length=100)
    icon = models.FileField(
        verbose_name=_('Icon'),
        help_text=_(
            'Icon file must be in JPEG or PNG format, and should be a square of at least 512×512 pixels.'
        ),
        upload_to='applications/icons/',
        blank=True,
        null=True,
    )
    description = models.TextField(verbose_name=_('Description'), blank=True)
    documentation_url = models.URLField(_('Documentation URL'), blank=True)
    editable = models.BooleanField(default=True)
    elements = models.ManyToManyField('Element', blank=True, through='Relation')
    creation_timestamp = models.DateTimeField(default=now)
    last_update_timestamp = models.DateTimeField(auto_now=True)

    def __repr__(self):
        return '<Application %s>' % self.slug

    def save(self, *args, **kwargs):
        # On first save, derive a unique slug from the name; the base is
        # truncated to 95 chars to leave room for a "-<n>" suffix within
        # the 100-char field limit.
        if not self.slug:
            base_slug = slugify(self.name)[:95]
            slug = base_slug
            i = 1
            while Application.objects.filter(slug=slug).exists():
                slug = '%s-%s' % (base_slug, i)
                i += 1
            self.slug = slug
        super().save(*args, **kwargs)

    def refresh_elements(self, cache_only=False):
        """Refresh local Element rows from the remote services.

        With cache_only=True, only the cached remote payload of each
        element is refreshed; otherwise auto-dependency relations are
        dropped first (they will be rediscovered by scandeps()) and
        element names are refreshed too.

        Returns a dict mapping (type, slug) to (Element, Relation).
        """
        if not cache_only:
            # Auto-added dependencies are recomputed on each full scan.
            self.relation_set.filter(auto_dependency=True).delete()
        remote_elements = {}
        relations = self.relation_set.select_related('element')
        elements = {(x.element.type, x.element.slug): (x.element, x) for x in relations}
        current_object_types = {t for t, s in elements}
        for object_type in get_object_types():
            # Only fetch listings for types actually used by this application.
            if object_type['id'] not in current_object_types:
                continue
            # On a full refresh, "minor" object types are skipped.
            if not cache_only and object_type.get('minor'):
                continue
            url = object_type['urls']['list']
            response = requests.get(url)
            if not response.ok:
                raise ScanError(
                    _('Failed to get elements of type %s (%s)' % (object_type['id'], response.status_code))
                )
            remote_elements[object_type['id']] = {x['id']: x for x in response.json()['data']}
        for element, relation in elements.values():
            # NOTE(review): this indexes remote_elements[element.type]
            # directly; if no service reported that type (or its listing
            # was skipped), this would raise KeyError — confirm all
            # current element types are always reported.
            if not remote_elements[element.type].get(element.slug):
                # Element no longer present remotely: keep the stale local row.
                continue
            remote_element = remote_elements[element.type][element.slug]
            if cache_only:
                if element.cache == remote_element:
                    continue
                element.cache = remote_element
                element.save()
                elements[(element.type, element.slug)] = (element, relation)
                continue
            if element.name == remote_element['text'] and element.cache == remote_element:
                continue
            element.name = remote_element['text']
            element.cache = remote_element
            element.save()
            elements[(element.type, element.slug)] = (element, relation)
        return elements

    def scandeps(self):
        """Expand the element set with all transitive dependencies.

        Repeatedly queries each element's "dependencies" URL and adds any
        missing dependency as an auto_dependency relation, until a whole
        pass discovers nothing new (fixed point).

        Returns the same (type, slug) -> (Element, Relation) mapping as
        refresh_elements(), including the newly added dependencies.
        """
        elements = self.refresh_elements()
        finished = False
        while not finished:
            finished = True
            # Iterate over a copy since `elements` grows during the loop.
            for el, rel in list(elements.values()):
                dependencies_url = el.cache['urls'].get('dependencies')
                if not dependencies_url:
                    continue
                response = requests.get(dependencies_url)
                if not response.ok:
                    # Record the failure on the relation before aborting.
                    rel.set_error(response.status_code)
                    raise ScanError(
                        _(
                            'Failed to scan "%s" (type %s, slug %s) dependencies (%s)'
                            % (el.name, el.type, el.slug, response.status_code)
                        )
                    )
                rel.reset_error()
                for dependency in response.json()['data']:
                    if (dependency['type'], dependency['id']) in elements:
                        continue
                    # New dependency found: one more pass will be needed.
                    finished = False
                    element, created = Element.objects.get_or_create(
                        type=dependency['type'], slug=dependency['id'], defaults={'name': dependency['text']}
                    )
                    element.name = dependency['text']
                    element.cache = dependency
                    element.save()
                    relation, created = Relation.objects.get_or_create(application=self, element=element)
                    if created:
                        relation.auto_dependency = True
                        relation.save()
                    elements[(element.type, element.slug)] = (element, relation)
        return elements

    def unlink(self):
        """Ask every primary, supported service to unlink this application.

        Raises UnlinkError on the first service that answers with a
        non-2xx status.
        """
        for service_id, services in getattr(settings, 'KNOWN_SERVICES', {}).items():
            if service_id not in Application.SUPPORTED_MODULES:
                continue
            service_objects = {x.get_base_url_path(): x for x in get_installed_services(types=[service_id])}
            for service in services.values():
                if service['url'] not in service_objects:
                    continue
                if service_objects[service['url']].secondary:
                    continue
                url = urllib.parse.urljoin(service['url'], 'api/export-import/unlink/')
                response = requests.post(url, data={'application': self.slug})
                if not response.ok:
                    raise UnlinkError(
                        _('Failed to unlink application in module %s (%s)')
                        % (service_id, response.status_code)
                    )
|
|
|
|
|
|
2022-01-08 16:07:08 +01:00
|
|
|
|
|
|
|
|
|
class Element(models.Model):
    """A single element of an application, identified by (type, slug).

    The ``cache`` field holds the latest payload received from the
    hosting service for this element (including its ``urls`` dict).
    """

    type = models.CharField(max_length=100, verbose_name=_('Type'))
    slug = models.SlugField(max_length=500, verbose_name=_('Slug'))
    name = models.CharField(max_length=500, verbose_name=_('Name'))
    cache = JSONField(blank=True, default=dict)

    def __repr__(self):
        return '<Element %s/%s>' % (self.type, self.slug)

    def get_redirect_url(self):
        """Return a URL redirecting to the element's backoffice page.

        Roles have no such page; elements without cached URLs yield None.
        A cached 'redirect' URL wins over one derived from 'export'.
        """
        if self.type == 'roles':
            return None
        urls = self.cache.get('urls')
        if not urls:
            return None
        redirect = urls.get('redirect')
        if redirect:
            return redirect
        export = urls.get('export')
        if export:
            return '%sredirect/' % export
        return None
|
|
|
|
|
|
2022-11-18 10:23:45 +01:00
|
|
|
|
|
|
|
|
|
class Relation(models.Model):
    """Through-model linking an Application to one of its Elements.

    ``auto_dependency`` marks elements added automatically during
    dependency scanning; ``error``/``error_status`` record the outcome of
    the last dependency request for this element.
    """

    application = models.ForeignKey(Application, on_delete=models.CASCADE)
    element = models.ForeignKey(Element, on_delete=models.CASCADE)
    auto_dependency = models.BooleanField(default=False)
    error = models.BooleanField(default=False)
    error_status = models.CharField(
        max_length=100,
        choices=[
            ('notfound', _('Not Found')),
            ('error', _('Error')),
        ],
        null=True,
    )

    def __repr__(self):
        return '<Relation %s - %s/%s>' % (self.application.slug, self.element.type, self.element.slug)

    def set_error(self, http_status_code):
        """Record a failed request against this relation and persist it."""
        if http_status_code == 404:
            self.error_status = 'notfound'
        else:
            self.error_status = 'error'
        self.error = True
        self.save()

    def reset_error(self):
        """Clear any recorded error state and persist the change."""
        self.error_status = None
        self.error = False
        self.save()
|
|
|
|
|
|
2022-01-08 16:07:08 +01:00
|
|
|
|
|
|
|
|
|
class Version(models.Model):
    """A numbered version of an application, serialized as a tar bundle.

    The bundle contains a manifest.json, one entry per element (exported
    from its hosting service) and optionally the application icon.
    """

    application = models.ForeignKey(Application, on_delete=models.CASCADE)
    number = models.CharField(max_length=100, verbose_name=_('Number'))
    notes = models.TextField(verbose_name=_('Notes'), blank=True)
    bundle = models.FileField(upload_to='applications', blank=True, null=True)
    creation_timestamp = models.DateTimeField(default=now)
    last_update_timestamp = models.DateTimeField(auto_now=True)
    deployment_status = JSONField(blank=True, default=dict)

    def __repr__(self):
        return '<Version %s>' % self.application.slug

    def create_bundle(self, job=None):
        """Build the tar bundle for this version and declare it.

        Scans dependencies, exports each element from its service into an
        in-memory tar archive together with manifest.json and the icon,
        saves it to the ``bundle`` file field, then declares the bundle's
        elements to the services.
        """
        app = self.application
        elements = app.scandeps()
        tar_io = io.BytesIO()
        with tarfile.open(mode='w', fileobj=tar_io) as tar:
            manifest_json = {
                'application': app.name,
                'slug': app.slug,
                'description': app.description,
                'documentation_url': app.documentation_url,
                'icon': os.path.basename(app.icon.name) if app.icon.name else None,
                'version_number': self.number,
                'version_notes': self.notes,
                'elements': [],
            }

            for element, relation in elements.values():
                manifest_json['elements'].append(
                    {
                        'type': element.type,
                        'slug': element.slug,
                        'name': element.name,
                        'auto-dependency': relation.auto_dependency,
                    }
                )

                # Fetch the element's export payload and store it in the
                # archive under "<type>/<slug>".
                # NOTE(review): response.ok is not checked here and the
                # 'content-length' header is assumed present — a failed or
                # chunked export would raise KeyError; confirm services
                # always send this header on export.
                response = requests.get(element.cache['urls']['export'])
                tarinfo = tarfile.TarInfo('%s/%s' % (element.type, element.slug))
                tarinfo.mtime = self.last_update_timestamp.timestamp()
                tarinfo.size = int(response.headers['content-length'])
                tar.addfile(tarinfo, fileobj=io.BytesIO(response.content))

            # Serialize the manifest last, once all elements are listed.
            manifest_fd = io.BytesIO(json.dumps(manifest_json, indent=2).encode())
            tarinfo = tarfile.TarInfo('manifest.json')
            tarinfo.size = len(manifest_fd.getvalue())
            tarinfo.mtime = self.last_update_timestamp.timestamp()
            tar.addfile(tarinfo, fileobj=manifest_fd)

            if app.icon.name:
                icon_fd = app.icon.file
                tarinfo = tarfile.TarInfo(manifest_json['icon'])
                tarinfo.size = icon_fd.size
                tarinfo.mtime = self.last_update_timestamp.timestamp()
                tar.addfile(tarinfo, fileobj=icon_fd)

        self.bundle.save('%s.tar' % app.slug, content=ContentFile(tar_io.getvalue()))
        self.save()

        # Immediately declare the new bundle's elements to the services.
        bundle_content = self.bundle.read()
        self.do_something_with_bundle(bundle_content, 'declare', job=job)

    def deploy(self, job=None):
        """Deploy this version's bundle to all supported services.

        Roles are provisioned through authentic first, then the bundle is
        pushed to each service, and finally the local element cache is
        refreshed.
        """
        bundle_content = self.bundle.read()
        self.deploy_roles(bundle_content)
        self.do_something_with_bundle(bundle_content, 'deploy', job=job)
        self.application.refresh_elements(cache_only=True)

    def do_something_with_bundle(self, bundle_content, action, job=None):
        """PUT the bundle to every primary supported service.

        ``action`` is 'deploy' or 'declare' and selects the target API
        endpoint and the error message. When a job is given, per-service
        progression URLs returned by the services are recorded on it.

        Raises DeploymentError on the first non-2xx response.
        """
        if action == 'deploy':
            target_url = 'api/export-import/bundle-import/'
            exception_message = _('Failed to deploy module %s (%s)')
        elif action == 'declare':
            target_url = 'api/export-import/bundle-declare/'
            exception_message = _('Failed to declare elements for module %s (%s)')

        for service_id, services in getattr(settings, 'KNOWN_SERVICES', {}).items():
            if service_id not in Application.SUPPORTED_MODULES:
                continue
            service_objects = {x.get_base_url_path(): x for x in get_installed_services(types=[service_id])}
            for service in services.values():
                if service['url'] not in service_objects:
                    continue
                if service_objects[service['url']].secondary:
                    continue
                url = urllib.parse.urljoin(service['url'], target_url)
                response = requests.put(url, data=bundle_content)
                if not response.ok:
                    raise DeploymentError(exception_message % (service_id, response.status_code))
                if not job:
                    continue
                # Best effort: services may answer with a progression URL.
                try:
                    response_json = response.json()
                except json.JSONDecodeError:
                    continue
                if not response_json.get('url'):
                    continue
                if service_id not in job.progression_urls:
                    job.progression_urls[service_id] = {}
                job.progression_urls[service_id][service['title']] = response_json['url']
                job.save()

    def get_authentic_service(self):
        """Return the first known 'authentic' service, or None."""
        for service_id, services in getattr(settings, 'KNOWN_SERVICES', {}).items():
            if service_id == 'authentic':
                for service in services.values():
                    return service
        return None

    def deploy_roles(self, bundle):
        """Create/update and provision the bundle's roles via authentic.

        Reads the manifest from the bundle, and for each element of type
        'roles' posts it to authentic's roles API (update_or_create by
        slug) then forces provisioning of the resulting role.

        Raises DeploymentError if any of the two calls fails.
        """
        tar_io = io.BytesIO(bundle)
        service = self.get_authentic_service()
        if not service:
            return
        roles_api_url = urllib.parse.urljoin(service['url'], 'api/roles/?update_or_create=slug')
        provision_api_url = urllib.parse.urljoin(service['url'], 'api/provision/')
        with tarfile.open(fileobj=tar_io) as tar:
            manifest = json.loads(tar.extractfile('manifest.json').read().decode())
            for element in manifest.get('elements'):
                if element.get('type') != 'roles':
                    continue
                role_info = json.loads(tar.extractfile('%s/%s' % (element['type'], element['slug'])).read())
                # create or update
                response = requests.post(roles_api_url, json=role_info)
                if not response.ok:
                    raise DeploymentError(
                        _('Failed to create role %s (%s)') % (element['slug'], response.status_code)
                    )
                # then force provisionning
                response = requests.post(provision_api_url, json={'role_uuid': response.json()['uuid']})
                if not response.ok:
                    raise DeploymentError(
                        _('Failed to provision role %s (%s)') % (element['slug'], response.status_code)
                    )
|
2022-10-30 12:54:13 +01:00
|
|
|
|
|
|
|
|
|
|
2022-11-17 15:15:32 +01:00
|
|
|
|
# Lifecycle states for AsyncJob.status.
STATUS_CHOICES = [
    ('registered', _('Registered')),
    ('running', _('Running')),
    ('failed', _('Failed')),
    ('completed', _('Completed')),
]
|
|
|
|
|
|
|
|
|
|
|
2022-10-30 12:54:13 +01:00
|
|
|
|
class AsyncJob(models.Model):
    """A background job acting on an application (scan/bundle/deploy).

    Jobs may run inline or be handed over to the uwsgi spooler; status
    transitions are recorded on the row, along with the exception text on
    failure and per-service progression URLs during deployment.
    """

    label = models.CharField(max_length=100)
    status = models.CharField(
        max_length=100,
        default='registered',
        choices=STATUS_CHOICES,
    )
    creation_timestamp = models.DateTimeField(default=now)
    last_update_timestamp = models.DateTimeField(auto_now=True)
    completion_timestamp = models.DateTimeField(default=None, null=True)
    exception = models.TextField()

    application = models.ForeignKey(Application, on_delete=models.CASCADE)
    version = models.ForeignKey(Version, on_delete=models.CASCADE, null=True)
    action = models.CharField(max_length=100)
    progression_urls = JSONField(blank=True, default=dict)

    # When True, caught errors are re-raised after being recorded
    # (the spooler sets this differently for detached execution —
    # presumably; confirm against hobo.applications.spooler).
    raise_exception = True

    def run(self, spool=False):
        """Execute the job's action, inline or through the uwsgi spooler.

        With spool=True under uwsgi, the job is enqueued to the spooler
        (passing the current tenant's domain) and this call returns
        immediately. Otherwise the action runs inline: status is set to
        'running', then 'completed' or 'failed'; the exception text is
        stored on failure.
        """
        if 'uwsgi' in sys.modules and spool:
            # Imported lazily: the spooler module only exists under uwsgi.
            from hobo.applications.spooler import run_job

            tenant = getattr(connection, 'tenant', None)
            domain = getattr(tenant, 'domain_url', '')
            # spooler arguments must be bytes
            run_job.spool(domain=domain.encode(), job_id=str(self.pk).encode())
            return
        self.status = 'running'
        self.save()
        try:
            if self.action == 'scandeps':
                self.application.scandeps()
            elif self.action == 'create_bundle':
                self.version.create_bundle(self)
            elif self.action == 'deploy':
                self.version.deploy(self)
        except ApplicationError as e:
            # Known failure: record its message.
            self.status = 'failed'
            self.exception = e.msg
            if self.raise_exception:
                raise
        except Exception:
            # Unexpected failure: record the full traceback.
            self.status = 'failed'
            self.exception = traceback.format_exc()
            if self.raise_exception:
                raise
        finally:
            # Still 'running' here means no exception occurred.
            if self.status == 'running':
                self.status = 'completed'
                self.completion_timestamp = now()
            self.save()
|