data: import site performances (#51472)

Lauréline Guérin 2021-03-01 09:47:36 +01:00
parent a55d79e99b
commit a8ab4035de
2 changed files with 40 additions and 10 deletions

@@ -469,12 +469,14 @@ class Page(models.Model):
         return serialized_page
 
     @classmethod
-    def load_serialized_page(cls, json_page, snapshot=None, request=None):
+    def load_serialized_page(cls, json_page, page=None, snapshot=None, request=None):
         json_page['model'] = 'data.page'
         json_page['fields']['groups'] = [
             [x] for x in json_page['fields']['groups'] if isinstance(x, six.string_types)
         ]
-        page, created = Page.objects.get_or_create(slug=json_page['fields']['slug'], snapshot=snapshot)
+        created = None
+        if page is None:
+            page, created = Page.objects.get_or_create(slug=json_page['fields']['slug'], snapshot=snapshot)
         json_page['pk'] = page.id
         parent_slug = json_page['fields'].get('parent') or []
         if parent_slug and not Page.objects.filter(slug=parent_slug[0]).exists():
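
The signature change lets a bulk caller hand in a Page it has already created, so the per-page get_or_create() round trip only runs when load_serialized_page() is called on its own. A minimal sketch of that idiom, using hypothetical names (Article, load_serialized_article) rather than combo's:

    # Sketch of the "optional pre-created instance" idiom; names are illustrative.
    from myapp.models import Article  # hypothetical model with a unique 'slug' field


    def load_serialized_article(json_article, article=None):
        created = None
        if article is None:
            # standalone call: one SELECT (and possibly one INSERT) per article
            article, created = Article.objects.get_or_create(slug=json_article['slug'])
        # bulk callers pass the instance in, so no extra query runs here
        json_article['pk'] = article.id
        return article, created
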
@@ -521,11 +523,39 @@
     @classmethod
     def load_serialized_pages(cls, json_site, request=None):
-        cells = []
-        for json_page in json_site:
-            cls.load_serialized_page(json_page, request=request)
-            cells.extend(json_page.get('cells'))
-        cls.load_serialized_cells(cells)
+        cells_to_load = []
+        to_load = []
+        to_build = []
+        try:
+            post_save.disconnect(cell_maintain_page_cell_cache)
+            post_delete.disconnect(cell_maintain_page_cell_cache)
+            for json_page in json_site:
+                # pre-create pages
+                page, created = Page.objects.get_or_create(slug=json_page['fields']['slug'])
+                to_load.append((page, created, json_page))
+            # delete cells of already existing pages
+            to_clean = [p for p, created, j in to_load if not created]
+            for klass in get_cell_classes():
+                if klass is None:
+                    continue
+                klass.objects.filter(page__in=to_clean).delete()
+            # now load pages
+            for (page, created, json_page) in to_load:
+                to_build.append(cls.load_serialized_page(json_page, page=page, request=request))
+                cells_to_load.extend(json_page.get('cells'))
+            # and cells
+            cls.load_serialized_cells(cells_to_load)
+        finally:
+            post_save.connect(cell_maintain_page_cell_cache)
+            post_delete.connect(cell_maintain_page_cell_cache)
+        # build cache
+        for page in to_build:
+            page.build_cell_cache()
 
     @classmethod
     def export_all_for_json(cls):
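
The rewritten load_serialized_pages() pre-creates all pages, bulk-deletes the cells of pages that already existed, and only then loads pages and cells, with the cell-cache signal receivers disconnected inside a try/finally so they are always reconnected; the cell cache is rebuilt once per page at the end instead of on every cell save or delete. A rough sketch of the underlying Django signal pattern, with hypothetical names (Cell, cell_cache_receiver, rebuild_cell_cache) standing in for combo's:

    # Sketch: switch signal receivers off around a bulk operation, then rebuild once.
    from django.db.models.signals import post_delete, post_save

    from myapp.cache import cell_cache_receiver, rebuild_cell_cache  # hypothetical
    from myapp.models import Cell                                     # hypothetical


    def bulk_load_cells(serialized_cells):
        try:
            # stop the cache receiver from firing once per saved/deleted cell
            post_save.disconnect(cell_cache_receiver, sender=Cell)
            post_delete.disconnect(cell_cache_receiver, sender=Cell)
            for data in serialized_cells:
                Cell.objects.create(**data)
        finally:
            # reconnect even if the load fails, so normal edits keep the cache fresh
            post_save.connect(cell_cache_receiver, sender=Cell)
            post_delete.connect(cell_cache_receiver, sender=Cell)
        # one cache rebuild at the end instead of one per cell
        rebuild_cell_cache()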

@@ -832,7 +832,7 @@ def test_site_export_import_json(app, admin_user):
     resp.form['site_file'] = Upload('site-export.json', site_export, 'application/json')
     with CaptureQueriesContext(connection) as ctx:
         resp = resp.form.submit()
-    assert len(ctx.captured_queries) in [823, 824]
+    assert len(ctx.captured_queries) in [268, 269]
     Page.objects.all().delete()
     assert LinkCell.objects.count() == 0
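
The query counts in these assertions come from Django's CaptureQueriesContext, which records every SQL statement issued inside the with block; checking len(ctx.captured_queries) pins the operation's query budget. A minimal usage sketch, with a hypothetical function under test:

    # Sketch: asserting a query budget in a Django test (pytest-django style).
    from django.db import connection
    from django.test.utils import CaptureQueriesContext


    def test_import_query_budget(db):
        with CaptureQueriesContext(connection) as ctx:
            run_site_import()  # hypothetical code under test
        # ctx.captured_queries is a list of dicts with 'sql' and 'time' keys
        assert len(ctx.captured_queries) < 300
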
@@ -841,7 +841,7 @@ def test_site_export_import_json(app, admin_user):
     resp.form['site_file'] = Upload('site-export.json', site_export, 'application/json')
     with CaptureQueriesContext(connection) as ctx:
         resp = resp.form.submit()
-    assert len(ctx.captured_queries) == 364
+    assert len(ctx.captured_queries) == 237
     assert set(Page.objects.get(slug='one').related_cells['cell_types']) == set(
         ['data_textcell', 'data_linkcell']
     )
@@ -2187,7 +2187,7 @@ def test_page_versionning(app, admin_user):
     resp = resp.click('restore', index=6)
     with CaptureQueriesContext(connection) as ctx:
         resp = resp.form.submit().follow()
-    assert len(ctx.captured_queries) == 143
+    assert len(ctx.captured_queries) == 144
     resp2 = resp.click('See online')
     assert resp2.text.index('Foobar1') < resp2.text.index('Foobar2') < resp2.text.index('Foobar3')