From 27f031485ebf2180ae8ab70458361b89ec44fff0 Mon Sep 17 00:00:00 2001 From: Joshua Munn Date: Wed, 25 Oct 2023 12:23:44 +0100 Subject: [PATCH 01/10] Add tox and GitHub actions workflow --- .github/workflows/test.yml | 29 +++++++++++++++++++++++++++++ tox.ini | 27 +++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) create mode 100644 .github/workflows/test.yml create mode 100644 tox.ini diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..799ddc1 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,29 @@ +name: wagtail-transfer CI + +on: + push: + branches: + - main + - master + - 'stable/**' + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python: ["3.8", "3.9", "3.10", "3.11"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install tox tox-gh-actions + - name: Test with tox + run: tox diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..57e7dac --- /dev/null +++ b/tox.ini @@ -0,0 +1,27 @@ +[tox] +skipsdist = True +usedevelop = True +envlist = + python{3.8,3.9,3.10}-django{3.2,4.1}-wagtail{4.1,5.0,5.1} + python3.11-django4.1-wagtail4.1 + python{3.8,3.9,3.10,3.11}-django{4.1,4.2}-wagtail{5.0,5.1} + +[gh-actions] +python = + 3.8: python3.8 + 3.9: python3.9 + 3.10: python3.10 + 3.11: python3.11 + +[testenv] +install_command = pip install -e . 
-U {opts} {packages} +commands = python runtests.py + +deps = + django3.2: Django>=3.2,<3.3 + django4.1: Django>=4.1,<4.2 + django4.2: Django>=4.2,<5.0 + + wagtail4.1: wagtail>=4.1,<4.2 + wagtail5.0: wagtail>=5.0,<5.1 + wagtail5.1: wagtail>=5.1,<5.2 From ed4c006b069835314b5073cdf833b7b4d15bec53 Mon Sep 17 00:00:00 2001 From: Joshua Munn Date: Wed, 25 Oct 2023 13:10:36 +0100 Subject: [PATCH 02/10] Add ruff config --- ruff.toml | 54 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 ruff.toml diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..6137866 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,54 @@ +target-version = "py38" + +exclude = [ + "vendor", + "dist", + "build", + "venv", + ".venv", + ".tox", + ".git", + "__pycache__", + "node_modules", + "LC_MESSAGES", + "locale", + "migrations", +] + +select = [ + "B", # flake8-bugbear + "BLE", # flake8-blind-except + "C4", # flake8-comprehensions + "DJ", # flake8-django + "E", # pycodestyle errors + "F", # pyflakes + "I", # isort + "PGH", # pygrep-hooks + "RUF100", # unused noqa + "S", # flake8-bandit + "T20", # flake8-print + "UP", # pyupgrade + "W", # pycodestyle warnings + "YTT", # flake8-2020 +] + +fixable = ["C4", "E", "F", "I", "UP"] + +ignore = [ + "E501", # line-too-long (conflicts with formatter) + "W191", # tab-indentation (conflicts with formatter) + "DJ008", # model without __str__ method + "B019", # functools.cache/lru_cache can lead to memory leaks + "S113", # use of requests without timeout +] + +[lint.per-file-ignores] +"**/tests/**/*.py" = [ + "S105", # possible hardcoded password + "S106", # possible hardcoded password + "DJ001", # use of null=True on CharField +] +".circleci/report_nightly_build_failure.py" = ["T201"] # use of print() + +[isort] +known-first-party = ["wagtail_transfer"] From ca3b4000068d19e736149a038b0d479e5e78b8e9 Mon Sep 17 00:00:00 2001 From: Joshua Munn Date: Wed, 25 Oct 2023 13:10:59 +0100 Subject: [PATCH 
03/10] Do first pass of ruff fixes --- .circleci/report_nightly_build_failure.py | 1 + runtests.py | 1 + setup.py | 4 +++ tests/blocks.py | 12 +++++-- tests/models.py | 2 +- tests/settings.py | 1 + tests/tests/test_api.py | 24 +++++++++----- tests/tests/test_import.py | 31 +++++++++++++------ tests/tests/test_views.py | 5 +-- tests/urls.py | 2 +- wagtail_transfer/admin_urls.py | 1 + wagtail_transfer/auth.py | 1 + wagtail_transfer/field_adapters.py | 6 ++-- wagtail_transfer/files.py | 2 ++ wagtail_transfer/locators.py | 6 ++-- .../commands/preseed_transfer_table.py | 8 ++--- wagtail_transfer/operations.py | 3 +- wagtail_transfer/richtext.py | 4 ++- wagtail_transfer/streamfield.py | 11 +++++-- wagtail_transfer/urls.py | 1 + wagtail_transfer/views.py | 4 ++- wagtail_transfer/wagtail_hooks.py | 1 - 22 files changed, 92 insertions(+), 39 deletions(-) diff --git a/.circleci/report_nightly_build_failure.py b/.circleci/report_nightly_build_failure.py index 7834801..24bf2eb 100644 --- a/.circleci/report_nightly_build_failure.py +++ b/.circleci/report_nightly_build_failure.py @@ -7,6 +7,7 @@ import requests + if 'SLACK_WEBHOOK_URL' in os.environ: print("Reporting to #nightly-build-failures slack channel") response = requests.post(os.environ['SLACK_WEBHOOK_URL'], json={ diff --git a/runtests.py b/runtests.py index 5c6aa2a..1bd72c1 100755 --- a/runtests.py +++ b/runtests.py @@ -5,5 +5,6 @@ from django.core.management import execute_from_command_line + os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' execute_from_command_line([sys.argv[0], 'test'] + sys.argv[1:]) diff --git a/setup.py b/setup.py index 13e0e2a..be75dec 100644 --- a/setup.py +++ b/setup.py @@ -2,6 +2,7 @@ from setuptools import find_packages, setup + setup( name='wagtail-transfer', version='0.9.1', @@ -19,6 +20,9 @@ 'mkdocs>=1.0,<1.1', 'mkdocs-material>=4.6,<4.7', ], + 'dev': [ + 'ruff>=1.2.0', + ], }, python_requires=">=3.7", license='BSD', diff --git a/tests/blocks.py b/tests/blocks.py index 
a0f8dfd..51ba31c 100644 --- a/tests/blocks.py +++ b/tests/blocks.py @@ -1,6 +1,12 @@ -from wagtail.blocks import (CharBlock, IntegerBlock, ListBlock, - PageChooserBlock, RichTextBlock, StreamBlock, - StructBlock) +from wagtail.blocks import ( + CharBlock, + IntegerBlock, + ListBlock, + PageChooserBlock, + RichTextBlock, + StreamBlock, + StructBlock, +) from wagtail.documents.blocks import DocumentChooserBlock diff --git a/tests/models.py b/tests/models.py index 4ba8b27..bfe8120 100644 --- a/tests/models.py +++ b/tests/models.py @@ -34,7 +34,7 @@ class Category(models.Model): colour = models.CharField(max_length=255, blank=True, null=True) def __str__(self): - return "{} {}".format(self.colour, self.name) + return f"{self.colour} {self.name}" class SponsoredPage(Page): diff --git a/tests/settings.py b/tests/settings.py index edd8d60..ea3c7c4 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -1,5 +1,6 @@ import os + BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) diff --git a/tests/tests/test_api.py b/tests/tests/test_api.py index 63cf69e..b028a33 100644 --- a/tests/tests/test_api.py +++ b/tests/tests/test_api.py @@ -2,6 +2,7 @@ import os.path import shutil import uuid + from datetime import datetime, timezone from unittest import mock @@ -14,13 +15,22 @@ from wagtail.images.models import Image from wagtail.models import Collection, Page -from tests.models import (Advert, Avatar, Category, LongAdvert, - ModelWithManyToMany, PageWithParentalManyToMany, - PageWithRichText, PageWithStreamField, SectionedPage, - SponsoredPage) +from tests.models import ( + Advert, + Avatar, + Category, + LongAdvert, + ModelWithManyToMany, + PageWithParentalManyToMany, + PageWithRichText, + PageWithStreamField, + SectionedPage, + SponsoredPage, +) from wagtail_transfer.auth import digest_for_source from wagtail_transfer.models import IDMapping + # We could use settings.MEDIA_ROOT here, but this way we avoid clobbering a real media folder if we # ever run 
these tests with non-test settings for any reason TEST_MEDIA_DIR = os.path.join(os.path.join(settings.BASE_DIR, 'test-media')) @@ -78,7 +88,7 @@ def test_model_object_chooser(self): def test_model_object_next_pagination(self): # Create 50 more categories for i in range(50): - name = "Car #{}".format(i) + name = f"Car #{i}" Category.objects.create(name=name, colour="Violet") response = self.client.get(f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters()}') @@ -98,7 +108,7 @@ def test_model_object_next_pagination(self): def test_model_object_previous_and_next_pagination(self): # Create 50 more categories for i in range(50): - name = "Car #{}".format(i) + name = f"Car #{i}" Category.objects.create(name=name, colour="Violet") response = self.client.get(f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters("page=2")}') @@ -118,7 +128,7 @@ def test_model_object_previous_and_next_pagination(self): def test_model_object_previous_pagination(self): # Create 50 more categories for i in range(50): - name = "Car #{}".format(i) + name = f"Car #{i}" Category.objects.create(name=name, colour="Violet") response = self.client.get(f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters("page=3")}') diff --git a/tests/tests/test_import.py b/tests/tests/test_import.py index 02c1dda..2cfffa2 100644 --- a/tests/tests/test_import.py +++ b/tests/tests/test_import.py @@ -1,6 +1,7 @@ import importlib import os.path import shutil + from datetime import datetime, timezone from unittest import mock @@ -9,16 +10,28 @@ from django.core.files.images import ImageFile from django.test import TestCase, override_settings from wagtail.images.models import Image -from wagtail.models import Collection, Comment, Page - -from tests.models import (Advert, Author, Avatar, Category, LongAdvert, - ModelWithManyToMany, PageWithParentalManyToMany, - PageWithRelatedPages, PageWithRichText, - PageWithStreamField, RedirectPage, SectionedPage, - 
SimplePage, SponsoredPage) +from wagtail.models import Collection, Page + +from tests.models import ( + Advert, + Author, + Avatar, + Category, + LongAdvert, + ModelWithManyToMany, + PageWithParentalManyToMany, + PageWithRelatedPages, + PageWithRichText, + PageWithStreamField, + RedirectPage, + SectionedPage, + SimplePage, + SponsoredPage, +) from wagtail_transfer.models import IDMapping from wagtail_transfer.operations import ImportPlanner + # We could use settings.MEDIA_ROOT here, but this way we avoid clobbering a real media folder if we # ever run these tests with non-test settings for any reason TEST_MEDIA_DIR = os.path.join(os.path.join(settings.BASE_DIR, 'test-media')) @@ -60,7 +73,7 @@ def test_import_model(self): importer.run() cats = Category.objects.all() - self.assertEquals(cats.count(), 2) + self.assertEqual(cats.count(), 2) def test_import_pages(self): @@ -1567,7 +1580,7 @@ def test_omitting_references_in_m2m_relations(self): # salad_dressing_page's related_pages should include the oil (id=30) and vinegar (id=21) # pages, but not the missing and not-to-be-imported page id=31 - self.assertEqual(set(salad_dressing_page.related_pages.all()), set([oil_page, vinegar_page])) + self.assertEqual(set(salad_dressing_page.related_pages.all()), {oil_page, vinegar_page}) def test_import_with_soft_dependency_on_grandchild(self): # https://github.com/wagtail/wagtail-transfer/issues/84 - diff --git a/tests/tests/test_views.py b/tests/tests/test_views.py index 668f493..bfbe095 100644 --- a/tests/tests/test_views.py +++ b/tests/tests/test_views.py @@ -1,4 +1,5 @@ import json + from datetime import date, datetime, timezone from unittest import mock @@ -144,7 +145,7 @@ def test_run(self, get, post): self.assertEqual(args[0], 'https://www.example.com/wagtail-transfer/api/objects/') self.assertIn('digest', kwargs['params']) requested_ids = json.loads(kwargs['data'])['tests.advert'] - self.assertEqual(set(requested_ids), set([8, 11])) + self.assertEqual(set(requested_ids), 
{8, 11}) # Check import results updated_page = SponsoredPage.objects.get(url_path='/home/oil-is-still-great/') @@ -265,7 +266,7 @@ def test_missing_related_object(self, get, post): self.assertEqual(args[0], 'https://www.example.com/wagtail-transfer/api/objects/') self.assertIn('digest', kwargs['params']) requested_ids = json.loads(kwargs['data'])['tests.advert'] - self.assertEqual(set(requested_ids), set([8, 11])) + self.assertEqual(set(requested_ids), {8, 11}) # Check import results updated_page = SponsoredPage.objects.get(url_path='/home/oil-is-still-great/') diff --git a/tests/urls.py b/tests/urls.py index 1babbda..de4c2a4 100644 --- a/tests/urls.py +++ b/tests/urls.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import, unicode_literals from django.urls import include, re_path from wagtail import urls as wagtail_urls @@ -6,6 +5,7 @@ from wagtail_transfer import urls as wagtailtransfer_urls + urlpatterns = [ re_path(r'^admin/', include(wagtailadmin_urls)), re_path(r'^wagtail-transfer/', include(wagtailtransfer_urls)), diff --git a/wagtail_transfer/admin_urls.py b/wagtail_transfer/admin_urls.py index b0fc079..ffe99fb 100644 --- a/wagtail_transfer/admin_urls.py +++ b/wagtail_transfer/admin_urls.py @@ -4,6 +4,7 @@ from .vendor.wagtail_api_v2.router import WagtailAPIRouter + chooser_api = WagtailAPIRouter('wagtail_transfer_admin:page_chooser_api') chooser_api.register_endpoint('pages', views.PageChooserAPIViewSet) diff --git a/wagtail_transfer/auth.py b/wagtail_transfer/auth.py index 85b519b..d022842 100644 --- a/wagtail_transfer/auth.py +++ b/wagtail_transfer/auth.py @@ -5,6 +5,7 @@ from django.conf import settings from django.core.exceptions import PermissionDenied + GROUP_QUERY_WITH_DIGEST = re.compile('(?P.*?)&?digest=(?P[^&]*)(?P.*)') def check_get_digest_wrapper(view_func): diff --git a/wagtail_transfer/field_adapters.py b/wagtail_transfer/field_adapters.py index 2ab1f2a..7c2d7af 100644 --- a/wagtail_transfer/field_adapters.py +++ 
b/wagtail_transfer/field_adapters.py @@ -1,17 +1,16 @@ import json import pathlib + from functools import lru_cache from urllib.parse import urlparse from django.conf import settings -from django.contrib.contenttypes.fields import (GenericForeignKey, - GenericRelation) +from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ImproperlyConfigured from django.db import models from django.db.models.fields.reverse_related import ManyToOneRel from django.utils.encoding import is_protected_type -from django.utils.functional import cached_property from modelcluster.fields import ParentalKey from taggit.managers import TaggableManager from wagtail import hooks @@ -23,6 +22,7 @@ from .richtext import get_reference_handler from .streamfield import get_object_references, update_object_ids + WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS = getattr(settings, "WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS", [('wagtailimages.image', 'tagged_items', True)]) FOLLOWED_REVERSE_RELATIONS = { (model_label.lower(), relation.lower()) for model_label, relation, _ in WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS diff --git a/wagtail_transfer/files.py b/wagtail_transfer/files.py index 1fbe65c..539beae 100644 --- a/wagtail_transfer/files.py +++ b/wagtail_transfer/files.py @@ -1,7 +1,9 @@ import hashlib + from contextlib import contextmanager import requests + from django.core.files.base import ContentFile from .models import ImportedFile diff --git a/wagtail_transfer/locators.py b/wagtail_transfer/locators.py index 5d7c893..8468f81 100644 --- a/wagtail_transfer/locators.py +++ b/wagtail_transfer/locators.py @@ -5,6 +5,7 @@ """ import uuid + from functools import lru_cache from django.conf import settings @@ -14,6 +15,7 @@ from .models import IDMapping, get_base_model + UUID_SEQUENCE = 0 # dict of models that should be located by field values using FieldLocator, @@ -46,7 +48,7 @@ def 
find(self, uid): if mapping.content_type != self.content_type: raise IntegrityError( - "Content type mismatch! Expected %r, got %r" % (self.content_type, mapping.content_type) + f"Content type mismatch! Expected {self.content_type!r}, got {mapping.content_type!r}" ) return mapping.content_object @@ -82,7 +84,7 @@ def attach_uid(self, instance, uid): """ if not isinstance(instance, self.model): raise IntegrityError( - "IDMappingLocator expected a %s instance, got %r" % (self.model, instance) + f"IDMappingLocator expected a {self.model} instance, got {instance!r}" ) # use update_or_create to account for the possibility of an existing IDMapping for the same diff --git a/wagtail_transfer/management/commands/preseed_transfer_table.py b/wagtail_transfer/management/commands/preseed_transfer_table.py index 5a58052..27be2f4 100644 --- a/wagtail_transfer/management/commands/preseed_transfer_table.py +++ b/wagtail_transfer/management/commands/preseed_transfer_table.py @@ -5,8 +5,8 @@ from django.core.exceptions import ObjectDoesNotExist from django.core.management.base import BaseCommand, CommandError -from wagtail_transfer.models import (IDMapping, get_base_model, - get_model_for_path) +from wagtail_transfer.models import IDMapping, get_base_model, get_model_for_path + # Namespace UUID common to all wagtail-transfer installances, used with uuid5 to generate # a predictable UUID for any given model-name / PK combination @@ -51,7 +51,7 @@ def handle(self, *args, **options): created_count = 0 for model in models: - model_name = "%s.%s" % (model._meta.app_label, model._meta.model_name) + model_name = f"{model._meta.app_label}.{model._meta.model_name}" content_type = ContentType.objects.get_for_model(model) # find IDs of instances of this model that already exist in the IDMapping table @@ -72,7 +72,7 @@ def handle(self, *args, **options): for pk in unmapped_ids: _, created = IDMapping.objects.get_or_create( content_type=content_type, local_id=pk, - defaults={'uid': 
uuid.uuid5(NAMESPACE, "%s:%s" % (model_name, pk))} + defaults={'uid': uuid.uuid5(NAMESPACE, f"{model_name}:{pk}")} ) if created: created_count += 1 diff --git a/wagtail_transfer/operations.py b/wagtail_transfer/operations.py index bf72ba6..72b6e41 100644 --- a/wagtail_transfer/operations.py +++ b/wagtail_transfer/operations.py @@ -1,4 +1,5 @@ import json + from copy import copy from django.conf import settings @@ -13,6 +14,7 @@ from .locators import get_locator_for_model from .models import get_base_model, get_base_model_for_path, get_model_for_path + # Models which should be updated to their latest version when encountered in object references default_update_related_models = ['wagtailimages.image'] @@ -384,7 +386,6 @@ def _handle_task(self, task): # and add objectives to ensure that they're all updated to their newest versions for rel in get_all_child_relations(specific_model): related_base_model = get_base_model(rel.related_model) - child_uids = set() for child_obj_pk in object_data['fields'][rel.name]: diff --git a/wagtail_transfer/richtext.py b/wagtail_transfer/richtext.py index 5c01e0b..33b004c 100644 --- a/wagtail_transfer/richtext.py +++ b/wagtail_transfer/richtext.py @@ -1,4 +1,5 @@ import re + from functools import partial from wagtail.rich_text import features @@ -6,6 +7,7 @@ from .models import get_base_model + FIND_A_TAG = re.compile(r']*)>(.*?)') FIND_EMBED_TAG = re.compile(r']*)/>') FIND_ID = re.compile(r'id="([^"]*)"') @@ -42,7 +44,7 @@ def update_tag_id(self, match, destination_ids_by_source): # The tag has no inner content, return a blank string instead return '' # Otherwise update the id and construct the new tag string - new_tag_body = FIND_ID.sub('id="{0}"'.format(str(new_id)), tag_body) + new_tag_body = FIND_ID.sub(f'id="{str(new_id)}"', tag_body) tag_body_offset = match.start(0) new_tag_string = match.group(0)[:(match.start(1)-tag_body_offset)] + new_tag_body + match.group(0)[(match.end(1)-tag_body_offset):] return new_tag_string diff --git 
a/wagtail_transfer/streamfield.py b/wagtail_transfer/streamfield.py index f545a39..28a1657 100644 --- a/wagtail_transfer/streamfield.py +++ b/wagtail_transfer/streamfield.py @@ -1,8 +1,13 @@ from functools import partial from django.core.exceptions import ValidationError -from wagtail.blocks import (ChooserBlock, ListBlock, RichTextBlock, - StreamBlock, StructBlock) +from wagtail.blocks import ( + ChooserBlock, + ListBlock, + RichTextBlock, + StreamBlock, + StructBlock, +) from .models import get_base_model from .richtext import get_reference_handler @@ -143,7 +148,7 @@ def map_over_json(self, stream, func): new_value = new_block_handler.map_over_json(new_stream, func) except ValidationError: if new_block.required: - raise ValidationError('This block requires a value for {}'.format(new_block)) + raise ValidationError(f'This block requires a value for {new_block}') else: # If the new block isn't required, just set it to the empty value new_value = new_block_handler.empty_value diff --git a/wagtail_transfer/urls.py b/wagtail_transfer/urls.py index 18cffb7..37ccf78 100644 --- a/wagtail_transfer/urls.py +++ b/wagtail_transfer/urls.py @@ -7,6 +7,7 @@ from .vendor.wagtail_api_v2.router import WagtailAPIRouter + chooser_api = WagtailAPIRouter('wagtail_transfer_page_chooser_api') chooser_api.register_endpoint('pages', views.PageChooserAPIViewSet) chooser_api.register_endpoint('models', ModelsAPIViewSet) diff --git a/wagtail_transfer/views.py b/wagtail_transfer/views.py index 989cb53..ffa17d6 100644 --- a/wagtail_transfer/views.py +++ b/wagtail_transfer/views.py @@ -1,7 +1,9 @@ import json + from collections import defaultdict import requests + from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import permission_required @@ -195,7 +197,7 @@ def chooser_api_proxy(request, source_name, path): if 'models' in request.GET: default_chooser_endpoint = 'models' - base_url = source_config['BASE_URL'] + 
'api/chooser/{}/'.format(default_chooser_endpoint) + base_url = source_config['BASE_URL'] + f'api/chooser/{default_chooser_endpoint}/' message = request.GET.urlencode() digest = digest_for_source(source_name, message) diff --git a/wagtail_transfer/wagtail_hooks.py b/wagtail_transfer/wagtail_hooks.py index 36ac74d..ecf3a38 100644 --- a/wagtail_transfer/wagtail_hooks.py +++ b/wagtail_transfer/wagtail_hooks.py @@ -1,6 +1,5 @@ from django.conf import settings from django.contrib.auth.models import Permission -from django.templatetags.static import static from django.urls import include, re_path, reverse from wagtail import hooks from wagtail.admin.menu import MenuItem From e166089173b2337803ba17cdff051d24b4b0515e Mon Sep 17 00:00:00 2001 From: Joshua Munn Date: Wed, 25 Oct 2023 13:22:19 +0100 Subject: [PATCH 04/10] First pass of formatting with ruff --- .circleci/report_nightly_build_failure.py | 15 +- runtests.py | 4 +- setup.py | 58 +- tests/apps.py | 4 +- tests/models.py | 4 +- tests/settings.py | 146 ++-- tests/tests/test_api.py | 693 +++++++++++------- tests/tests/test_import.py | 337 ++++++--- tests/tests/test_views.py | 120 +-- tests/urls.py | 8 +- wagtail_transfer/admin_urls.py | 23 +- wagtail_transfer/apps.py | 4 +- wagtail_transfer/auth.py | 17 +- wagtail_transfer/field_adapters.py | 101 ++- wagtail_transfer/files.py | 11 +- wagtail_transfer/locators.py | 18 +- .../commands/preseed_transfer_table.py | 39 +- wagtail_transfer/models.py | 6 +- wagtail_transfer/operations.py | 163 ++-- wagtail_transfer/richtext.py | 40 +- wagtail_transfer/serializers.py | 56 +- wagtail_transfer/streamfield.py | 38 +- wagtail_transfer/urls.py | 37 +- wagtail_transfer/views.py | 220 +++--- wagtail_transfer/wagtail_hooks.py | 16 +- 25 files changed, 1354 insertions(+), 824 deletions(-) diff --git a/.circleci/report_nightly_build_failure.py b/.circleci/report_nightly_build_failure.py index 24bf2eb..77a9fbc 100644 --- a/.circleci/report_nightly_build_failure.py +++ 
b/.circleci/report_nightly_build_failure.py @@ -8,13 +8,18 @@ import requests -if 'SLACK_WEBHOOK_URL' in os.environ: +if "SLACK_WEBHOOK_URL" in os.environ: print("Reporting to #nightly-build-failures slack channel") - response = requests.post(os.environ['SLACK_WEBHOOK_URL'], json={ - "text": "A Nightly build failed. See " + os.environ['CIRCLE_BUILD_URL'], - }) + response = requests.post( + os.environ["SLACK_WEBHOOK_URL"], + json={ + "text": "A Nightly build failed. See " + os.environ["CIRCLE_BUILD_URL"], + }, + ) print("Slack responded with:", response) else: - print("Unable to report to #nightly-build-failures slack channel because SLACK_WEBHOOK_URL is not set") + print( + "Unable to report to #nightly-build-failures slack channel because SLACK_WEBHOOK_URL is not set" + ) diff --git a/runtests.py b/runtests.py index 1bd72c1..c3dd1c3 100755 --- a/runtests.py +++ b/runtests.py @@ -6,5 +6,5 @@ from django.core.management import execute_from_command_line -os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' -execute_from_command_line([sys.argv[0], 'test'] + sys.argv[1:]) +os.environ["DJANGO_SETTINGS_MODULE"] = "tests.settings" +execute_from_command_line([sys.argv[0], "test"] + sys.argv[1:]) diff --git a/setup.py b/setup.py index be75dec..e9b80c5 100644 --- a/setup.py +++ b/setup.py @@ -4,44 +4,42 @@ setup( - name='wagtail-transfer', - version='0.9.1', + name="wagtail-transfer", + version="0.9.1", description="Content transfer for Wagtail", - author='Matthew Westcott', - author_email='matthew.westcott@torchbox.com', - url='https://github.com/wagtail/wagtail-transfer', - packages=find_packages(exclude=('tests',)), + author="Matthew Westcott", + author_email="matthew.westcott@torchbox.com", + url="https://github.com/wagtail/wagtail-transfer", + packages=find_packages(exclude=("tests",)), include_package_data=True, - install_requires=[ - 'wagtail>=4.1' - ], + install_requires=["wagtail>=4.1"], extras_require={ - 'docs': [ - 'mkdocs>=1.0,<1.1', - 
'mkdocs-material>=4.6,<4.7', + "docs": [ + "mkdocs>=1.0,<1.1", + "mkdocs-material>=4.6,<4.7", ], - 'dev': [ - 'ruff>=1.2.0', + "dev": [ + "ruff>=1.2.0", ], }, python_requires=">=3.7", - license='BSD', + license="BSD", long_description="An extension for Wagtail allowing content to be transferred between multiple instances of a Wagtail project", classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Web Environment', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Framework :: Django', - 'Framework :: Wagtail', - 'Framework :: Wagtail :: 4', + "Development Status :: 4 - Beta", + "Environment :: Web Environment", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Framework :: Django", + "Framework :: Wagtail", + "Framework :: Wagtail :: 4", ], ) diff --git a/tests/apps.py b/tests/apps.py index b79f9f5..3e61cda 100644 --- a/tests/apps.py +++ b/tests/apps.py @@ -2,5 +2,5 @@ class WagtailTransferTestsAppConfig(AppConfig): - name = 'tests' - default_auto_field = 'django.db.models.AutoField' + name = "tests" + default_auto_field = "django.db.models.AutoField" diff --git a/tests/models.py b/tests/models.py index bfe8120..d2040f9 100644 --- a/tests/models.py +++ b/tests/models.py @@ -59,7 +59,9 @@ class PageWithRichText(Page): class 
PageWithStreamField(Page): - body = StreamField(BaseStreamBlock(), verbose_name="Page body", blank=True, use_json_field=True) + body = StreamField( + BaseStreamBlock(), verbose_name="Page body", blank=True, use_json_field=True + ) class PageWithParentalManyToMany(Page): diff --git a/tests/settings.py b/tests/settings.py index ea3c7c4..d593a17 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -10,137 +10,135 @@ # Application definition INSTALLED_APPS = [ - 'tests', - 'wagtail_transfer', - - 'wagtail.contrib.forms', - 'wagtail.contrib.redirects', - 'wagtail.embeds', - 'wagtail.sites', - 'wagtail.users', - 'wagtail.snippets', - 'wagtail.documents', - 'wagtail.images', - 'wagtail.search', - 'wagtail.admin', - 'wagtail', - 'modelcluster', - 'taggit', - - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', + "tests", + "wagtail_transfer", + "wagtail.contrib.forms", + "wagtail.contrib.redirects", + "wagtail.embeds", + "wagtail.sites", + "wagtail.users", + "wagtail.snippets", + "wagtail.documents", + "wagtail.images", + "wagtail.search", + "wagtail.admin", + "wagtail", + "modelcluster", + "taggit", + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", ] MIDDLEWARE = [ - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'django.middleware.security.SecurityMiddleware', - - 'wagtail.contrib.redirects.middleware.RedirectMiddleware', + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + 
"django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "django.middleware.security.SecurityMiddleware", + "wagtail.contrib.redirects.middleware.RedirectMiddleware", ] -ROOT_URLCONF = 'tests.urls' +ROOT_URLCONF = "tests.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], }, }, ] -WSGI_APPLICATION = 'tests.wsgi.application' +WSGI_APPLICATION = "tests.wsgi.application" # Database # https://docs.djangoproject.com/en/{{ docs_version }}/ref/settings/#databases DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": os.path.join(BASE_DIR, "db.sqlite3"), } } AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", 
}, { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] PASSWORD_HASHERS = ( - 'django.contrib.auth.hashers.MD5PasswordHasher', # don't use the intentionally slow default password hasher + "django.contrib.auth.hashers.MD5PasswordHasher", # don't use the intentionally slow default password hasher ) -LANGUAGE_CODE = 'en-us' -TIME_ZONE = 'UTC' +LANGUAGE_CODE = "en-us" +TIME_ZONE = "UTC" USE_I18N = True USE_L10N = True USE_TZ = True STATICFILES_FINDERS = [ - 'django.contrib.staticfiles.finders.FileSystemFinder', - 'django.contrib.staticfiles.finders.AppDirectoriesFinder', + "django.contrib.staticfiles.finders.FileSystemFinder", + "django.contrib.staticfiles.finders.AppDirectoriesFinder", ] -STATIC_ROOT = os.path.join(BASE_DIR, 'static') -STATIC_URL = '/static/' +STATIC_ROOT = os.path.join(BASE_DIR, "static") +STATIC_URL = "/static/" -MEDIA_ROOT = os.path.join(BASE_DIR, 'test-media') -MEDIA_URL = 'http://media.example.com/media/' +MEDIA_ROOT = os.path.join(BASE_DIR, "test-media") +MEDIA_URL = "http://media.example.com/media/" -SECRET_KEY = 'not needed' +SECRET_KEY = "not needed" # Wagtail settings WAGTAIL_SITE_NAME = "wagtail-transfer" -WAGTAILADMIN_BASE_URL = 'http://example.com' +WAGTAILADMIN_BASE_URL = "http://example.com" WAGTAILTRANSFER_SOURCES = { - 'staging': { - 'BASE_URL': 'https://www.example.com/wagtail-transfer/', - 'SECRET_KEY': 'i-am-the-staging-example-secret-key', + "staging": { + "BASE_URL": "https://www.example.com/wagtail-transfer/", + "SECRET_KEY": "i-am-the-staging-example-secret-key", }, - 'local': { + "local": { # so that we can use the wagtail_transfer.auth.digest_for_source helper in API tests - 'BASE_URL': 'http://localhost/wagtail-transfer/', - 'SECRET_KEY': 'i-am-the-local-secret-key', - } + "BASE_URL": "http://localhost/wagtail-transfer/", + "SECRET_KEY": "i-am-the-local-secret-key", + }, } -WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS 
= [('wagtailimages.image', 'tagged_items', True), ('tests.advert', 'tagged_items', True)] +WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS = [ + ("wagtailimages.image", "tagged_items", True), + ("tests.advert", "tagged_items", True), +] -WAGTAILTRANSFER_SECRET_KEY = 'i-am-the-local-secret-key' +WAGTAILTRANSFER_SECRET_KEY = "i-am-the-local-secret-key" -WAGTAILTRANSFER_UPDATE_RELATED_MODELS = ['wagtailimages.Image', 'tests.advert'] +WAGTAILTRANSFER_UPDATE_RELATED_MODELS = ["wagtailimages.Image", "tests.advert"] -WAGTAILTRANSFER_LOOKUP_FIELDS = { - 'tests.category': ['name'] -} +WAGTAILTRANSFER_LOOKUP_FIELDS = {"tests.category": ["name"]} -WAGTAILADMIN_BASE_URL = 'http://example.com' +WAGTAILADMIN_BASE_URL = "http://example.com" diff --git a/tests/tests/test_api.py b/tests/tests/test_api.py index b028a33..25c425b 100644 --- a/tests/tests/test_api.py +++ b/tests/tests/test_api.py @@ -33,57 +33,63 @@ # We could use settings.MEDIA_ROOT here, but this way we avoid clobbering a real media folder if we # ever run these tests with non-test settings for any reason -TEST_MEDIA_DIR = os.path.join(os.path.join(settings.BASE_DIR, 'test-media')) -FIXTURES_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'fixtures') +TEST_MEDIA_DIR = os.path.join(os.path.join(settings.BASE_DIR, "test-media")) +FIXTURES_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "fixtures") class TestPageChooserApi(TestCase): def test_incorrect_digest(self): response = self.client.get( - '/wagtail-transfer/api/chooser/pages/?child_of=root&fields=parent%2Cchildren&limit=20&offset=0&digest=4' + "/wagtail-transfer/api/chooser/pages/?child_of=root&fields=parent%2Cchildren&limit=20&offset=0&digest=4" ) self.assertEqual(response.status_code, 403) def test_correct_digest(self): - digest = digest_for_source('local', 'child_of=root&fields=parent%2Cchildren&limit=20&offset=0') + digest = digest_for_source( + "local", "child_of=root&fields=parent%2Cchildren&limit=20&offset=0" + ) response = 
self.client.get( - f'/wagtail-transfer/api/chooser/pages/?child_of=root&fields=parent%2Cchildren&limit=20&offset=0&digest={digest}' + f"/wagtail-transfer/api/chooser/pages/?child_of=root&fields=parent%2Cchildren&limit=20&offset=0&digest={digest}" ) self.assertEqual(response.status_code, 200) class TestModelsApi(TestCase): - fixtures = ['test.json'] + fixtures = ["test.json"] - def get_parameters(self, initial_get='models=true'): - digest = digest_for_source('local', initial_get) - return f'?{initial_get}&digest={digest}' + def get_parameters(self, initial_get="models=true"): + digest = digest_for_source("local", initial_get) + return f"?{initial_get}&digest={digest}" def test_model_chooser_response(self): - response = self.client.get(f'/wagtail-transfer/api/chooser/models/{self.get_parameters()}') + response = self.client.get( + f"/wagtail-transfer/api/chooser/models/{self.get_parameters()}" + ) self.assertEqual(response.status_code, 200) content = json.loads(response.content.decode("utf-8")) - self.assertEqual(content['meta']['total_count'], 1) + self.assertEqual(content["meta"]["total_count"], 1) - snippet = content['items'][0] - self.assertEqual(snippet['model_label'], 'tests.category') - self.assertEqual(snippet['name'], 'Category') + snippet = content["items"][0] + self.assertEqual(snippet["model_label"], "tests.category") + self.assertEqual(snippet["name"], "Category") def test_model_object_chooser(self): - response = self.client.get(f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters()}') + response = self.client.get( + f"/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters()}" + ) self.assertEqual(response.status_code, 200) content = json.loads(response.content.decode("utf-8")) - self.assertEqual(content['meta']['total_count'], 1) - self.assertEqual(content['meta']['next'], None) - self.assertEqual(content['meta']['previous'], None) + self.assertEqual(content["meta"]["total_count"], 1) + 
self.assertEqual(content["meta"]["next"], None) + self.assertEqual(content["meta"]["previous"], None) - snippet = content['items'][0] - self.assertEqual(snippet['model_label'], 'tests.category') - self.assertEqual(snippet['object_name'], 'red Cars') - self.assertEqual(snippet['name'], 'Cars') - self.assertEqual(snippet['colour'], 'red') + snippet = content["items"][0] + self.assertEqual(snippet["model_label"], "tests.category") + self.assertEqual(snippet["object_name"], "red Cars") + self.assertEqual(snippet["name"], "Cars") + self.assertEqual(snippet["colour"], "red") def test_model_object_next_pagination(self): # Create 50 more categories @@ -91,15 +97,17 @@ def test_model_object_next_pagination(self): name = f"Car #{i}" Category.objects.create(name=name, colour="Violet") - response = self.client.get(f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters()}') + response = self.client.get( + f"/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters()}" + ) self.assertEqual(response.status_code, 200) content = json.loads(response.content.decode("utf-8")) - self.assertEqual(content['meta']['total_count'], 51) - self.assertTrue(bool(content['meta']['next'])) - self.assertFalse(bool(content['meta']['previous'])) + self.assertEqual(content["meta"]["total_count"], 51) + self.assertTrue(bool(content["meta"]["next"])) + self.assertFalse(bool(content["meta"]["previous"])) - items = content['items'] + items = content["items"] self.assertEqual(len(items), 20) # Remove the newly created categories @@ -111,15 +119,17 @@ def test_model_object_previous_and_next_pagination(self): name = f"Car #{i}" Category.objects.create(name=name, colour="Violet") - response = self.client.get(f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters("page=2")}') + response = self.client.get( + f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters("page=2")}' + ) self.assertEqual(response.status_code, 200) content = 
json.loads(response.content.decode("utf-8")) - self.assertEqual(content['meta']['total_count'], 51) - self.assertTrue(bool(content['meta']['previous'])) - self.assertTrue(bool(content['meta']['next'])) + self.assertEqual(content["meta"]["total_count"], 51) + self.assertTrue(bool(content["meta"]["previous"])) + self.assertTrue(bool(content["meta"]["next"])) - items = content['items'] + items = content["items"] self.assertEqual(len(items), 20) # Remove the newly created categories @@ -131,17 +141,19 @@ def test_model_object_previous_pagination(self): name = f"Car #{i}" Category.objects.create(name=name, colour="Violet") - response = self.client.get(f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters("page=3")}') + response = self.client.get( + f'/wagtail-transfer/api/chooser/models/tests.category/{self.get_parameters("page=3")}' + ) self.assertEqual(response.status_code, 200) content = json.loads(response.content.decode("utf-8")) - self.assertEqual(content['meta']['total_count'], 51) - self.assertTrue(bool(content['meta']['previous'])) - self.assertFalse(bool(content['meta']['next'])) + self.assertEqual(content["meta"]["total_count"], 51) + self.assertTrue(bool(content["meta"]["previous"])) + self.assertFalse(bool(content["meta"]["next"])) # Pagination happens 20 at a time by default. # Page 3 = 2 pages of 20, with 11 remaining. 
- items = content['items'] + items = content["items"] self.assertEqual(len(items), 11) # Remove the newly created categories @@ -149,14 +161,17 @@ def test_model_object_previous_pagination(self): class TestPagesApi(TestCase): - fixtures = ['test.json'] + fixtures = ["test.json"] def get(self, page_id, recursive=True): - digest = digest_for_source('local', str(page_id)) - return self.client.get('/wagtail-transfer/api/pages/%d/?digest=%s&recursive=%s' % (page_id, digest, str(recursive).lower())) + digest = digest_for_source("local", str(page_id)) + return self.client.get( + "/wagtail-transfer/api/pages/%d/?digest=%s&recursive=%s" + % (page_id, digest, str(recursive).lower()) + ) def test_incorrect_digest(self): - response = self.client.get('/wagtail-transfer/api/pages/2/?digest=12345678') + response = self.client.get("/wagtail-transfer/api/pages/2/?digest=12345678") self.assertEqual(response.status_code, 403) def test_pages_api(self): @@ -164,23 +179,27 @@ def test_pages_api(self): self.assertEqual(response.status_code, 200) data = json.loads(response.content) - ids_for_import = data['ids_for_import'] - self.assertIn(['wagtailcore.page', 2], ids_for_import) - self.assertNotIn(['wagtailcore.page', 1], ids_for_import) + ids_for_import = data["ids_for_import"] + self.assertIn(["wagtailcore.page", 2], ids_for_import) + self.assertNotIn(["wagtailcore.page", 1], ids_for_import) homepage = None - for obj in data['objects']: - if obj['model'] == 'tests.simplepage' and obj['pk'] == 2: + for obj in data["objects"]: + if obj["model"] == "tests.simplepage" and obj["pk"] == 2: homepage = obj break self.assertTrue(homepage) - self.assertEqual(homepage['parent_id'], 1) - self.assertEqual(homepage['fields']['intro'], "This is the homepage") + self.assertEqual(homepage["parent_id"], 1) + self.assertEqual(homepage["fields"]["intro"], "This is the homepage") - mappings = data['mappings'] - self.assertIn(['wagtailcore.page', 2, "22222222-2222-2222-2222-222222222222"], mappings) - 
self.assertIn(['tests.advert', 1, "adadadad-1111-1111-1111-111111111111"], mappings) + mappings = data["mappings"] + self.assertIn( + ["wagtailcore.page", 2, "22222222-2222-2222-2222-222222222222"], mappings + ) + self.assertIn( + ["tests.advert", 1, "adadadad-1111-1111-1111-111111111111"], mappings + ) def test_export_root(self): response = self.get(1) @@ -188,16 +207,16 @@ def test_export_root(self): data = json.loads(response.content) root_page = None - for obj in data['objects']: - if obj['model'] == 'wagtailcore.page' and obj['pk'] == 1: + for obj in data["objects"]: + if obj["model"] == "wagtailcore.page" and obj["pk"] == 1: root_page = obj break self.assertTrue(root_page) - self.assertEqual(root_page['parent_id'], None) + self.assertEqual(root_page["parent_id"], None) # check that the child page will also be imported - self.assertIn(['wagtailcore.page', 2], data['ids_for_import']) + self.assertIn(["wagtailcore.page", 2], data["ids_for_import"]) def test_export_nonrecursive(self): response = self.get(1, recursive=False) @@ -205,17 +224,21 @@ def test_export_nonrecursive(self): data = json.loads(response.content) # check that the child page will not be imported - self.assertNotIn(['wagtailcore.page', 2], data['ids_for_import']) + self.assertNotIn(["wagtailcore.page", 2], data["ids_for_import"]) # check that the original page is still listed for import - self.assertIn(['wagtailcore.page', 1], data['ids_for_import']) + self.assertIn(["wagtailcore.page", 1], data["ids_for_import"]) def test_parental_keys(self): - page = SectionedPage(title='How to make a cake', intro="Here is how to make a cake.") - page.sections.create(title="Create the universe", body="First, create the universe") + page = SectionedPage( + title="How to make a cake", intro="Here is how to make a cake." 
+ ) + page.sections.create( + title="Create the universe", body="First, create the universe" + ) page.sections.create(title="Find some eggs", body="Next, find some eggs") - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) response = self.get(parent_page.id) @@ -224,28 +247,32 @@ def test_parental_keys(self): page_data = None section_data = [] - for obj in data['objects']: - if obj['model'] == 'tests.sectionedpage' and obj['pk'] == page.pk: + for obj in data["objects"]: + if obj["model"] == "tests.sectionedpage" and obj["pk"] == page.pk: page_data = obj - if obj['model'] == 'tests.sectionedpagesection': + if obj["model"] == "tests.sectionedpagesection": section_data.append(obj) - self.assertEqual(len(page_data['fields']['sections']), 2) - self.assertEqual(section_data[0]['model'], 'tests.sectionedpagesection') - self.assertTrue(section_data[0]['fields']['title'] == "Create the universe") - section_id = page_data['fields']['sections'][0] + self.assertEqual(len(page_data["fields"]["sections"]), 2) + self.assertEqual(section_data[0]["model"], "tests.sectionedpagesection") + self.assertTrue(section_data[0]["fields"]["title"] == "Create the universe") + section_id = page_data["fields"]["sections"][0] # there should also be a uid mapping for the section matching_uids = [ - uid for model_name, pk, uid in data['mappings'] - if model_name == 'tests.sectionedpagesection' and pk == section_id + uid + for model_name, pk, uid in data["mappings"] + if model_name == "tests.sectionedpagesection" and pk == section_id ] self.assertEqual(len(matching_uids), 1) def test_rich_text_with_page_link(self): - page = PageWithRichText(title="You won't believe how rich this cake was!", body='

But I have a link

') + page = PageWithRichText( + title="You won't believe how rich this cake was!", + body='

But I have a link

', + ) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) response = self.get(page.id) @@ -253,12 +280,18 @@ def test_rich_text_with_page_link(self): self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertIn(['wagtailcore.page', 1, '11111111-1111-1111-1111-111111111111'], data['mappings']) + self.assertIn( + ["wagtailcore.page", 1, "11111111-1111-1111-1111-111111111111"], + data["mappings"], + ) def test_rich_text_with_dead_page_link(self): - page = PageWithRichText(title="You won't believe how rich this cake was!", body='

But I have a link

') + page = PageWithRichText( + title="You won't believe how rich this cake was!", + body='

But I have a link

', + ) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) response = self.get(page.id) @@ -266,15 +299,17 @@ def test_rich_text_with_dead_page_link(self): self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertTrue(any( - model == 'wagtailcore.page' and id == 999 - for model, id, uid in data['mappings'] - )) + self.assertTrue( + any( + model == "wagtailcore.page" and id == 999 + for model, id, uid in data["mappings"] + ) + ) def test_null_rich_text(self): page = PageWithRichText(title="I'm lost for words", body=None) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) response = self.get(page.id) @@ -282,22 +317,21 @@ def test_null_rich_text(self): self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertTrue(any( - obj['pk'] == page.pk - for obj in data['objects'] - )) + self.assertTrue(any(obj["pk"] == page.pk for obj in data["objects"])) def test_rich_text_with_image_embed(self): - with open(os.path.join(FIXTURES_DIR, 'wagtail.jpg'), 'rb') as f: + with open(os.path.join(FIXTURES_DIR, "wagtail.jpg"), "rb") as f: image = Image.objects.create( - title="Wagtail", - file=ImageFile(f, name='wagtail.jpg') + title="Wagtail", file=ImageFile(f, name="wagtail.jpg") ) - body = '

Here is an image

' % image.pk + body = ( + '

Here is an image

' + % image.pk + ) page = PageWithRichText(title="The cake is a lie.", body=body) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) response = self.get(page.id) @@ -305,130 +339,203 @@ def test_rich_text_with_image_embed(self): self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertTrue(any( - model == 'wagtailimages.image' and pk == image.pk - for model, pk, uid in data['mappings'] - )) + self.assertTrue( + any( + model == "wagtailimages.image" and pk == image.pk + for model, pk, uid in data["mappings"] + ) + ) def test_streamfield_with_page_links_in_new_listblock_format(self): - page = PageWithStreamField(title="I have a streamfield", - body=json.dumps([ - {'type': 'list_of_captioned_pages', - 'value': - [{'type': 'item', - 'value': { - 'page': 5, - 'text': 'a caption' - }, - 'id': '8c0d7de7-4f77-4477-be67-7d990d0bfb82'}], - 'id': '21ffe52a-c0fc-4ecc-92f1-17b356c9cc94'}, - ])) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + page = PageWithStreamField( + title="I have a streamfield", + body=json.dumps( + [ + { + "type": "list_of_captioned_pages", + "value": [ + { + "type": "item", + "value": {"page": 5, "text": "a caption"}, + "id": "8c0d7de7-4f77-4477-be67-7d990d0bfb82", + } + ], + "id": "21ffe52a-c0fc-4ecc-92f1-17b356c9cc94", + }, + ] + ), + ) + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) - digest = digest_for_source('local', str(page.id)) - response = self.client.get('/wagtail-transfer/api/pages/%d/?digest=%s' % (page.id, digest)) + digest = digest_for_source("local", str(page.id)) + response = self.client.get( + "/wagtail-transfer/api/pages/%d/?digest=%s" % (page.id, digest) + ) data = json.loads(response.content) # test PageChooserBlock in ListBlock - self.assertIn(['wagtailcore.page', 5, 
"00017017-5555-5555-5555-555555555555"], data['mappings']) + self.assertIn( + ["wagtailcore.page", 5, "00017017-5555-5555-5555-555555555555"], + data["mappings"], + ) def test_streamfield_with_page_links(self): # Check that page links in a complex nested StreamField - with StreamBlock, StructBlock, and ListBlock - # are all picked up in mappings - page = PageWithStreamField(title="I have a streamfield", - body=json.dumps([{'type': 'link_block', - 'value': - {'page': 1, - 'text': 'Test'}, - 'id': 'fc3b0d3d-d316-4271-9e31-84919558188a'}, - {'type': 'page', - 'value': 2, - 'id': 'c6d07d3a-72d4-445e-8fa5-b34107291176'}, - {'type': 'stream', - 'value': - [{'type': 'page', - 'value': 3, - 'id': '8c0d7de7-4f77-4477-be67-7d990d0bfb82'}], - 'id': '21ffe52a-c0fc-4ecc-92f1-17b356c9cc94'}, - {'type': 'list_of_pages', - 'value': [5], - 'id': '17b972cb-a952-4940-87e2-e4eb00703997'}])) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + page = PageWithStreamField( + title="I have a streamfield", + body=json.dumps( + [ + { + "type": "link_block", + "value": {"page": 1, "text": "Test"}, + "id": "fc3b0d3d-d316-4271-9e31-84919558188a", + }, + { + "type": "page", + "value": 2, + "id": "c6d07d3a-72d4-445e-8fa5-b34107291176", + }, + { + "type": "stream", + "value": [ + { + "type": "page", + "value": 3, + "id": "8c0d7de7-4f77-4477-be67-7d990d0bfb82", + } + ], + "id": "21ffe52a-c0fc-4ecc-92f1-17b356c9cc94", + }, + { + "type": "list_of_pages", + "value": [5], + "id": "17b972cb-a952-4940-87e2-e4eb00703997", + }, + ] + ), + ) + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) - digest = digest_for_source('local', str(page.id)) - response = self.client.get('/wagtail-transfer/api/pages/%d/?digest=%s' % (page.id, digest)) + digest = digest_for_source("local", str(page.id)) + response = self.client.get( + "/wagtail-transfer/api/pages/%d/?digest=%s" % (page.id, digest) + ) data = json.loads(response.content) # test 
PageChooserBlock in StructBlock - self.assertIn(['wagtailcore.page', 1, '11111111-1111-1111-1111-111111111111'], data['mappings']) + self.assertIn( + ["wagtailcore.page", 1, "11111111-1111-1111-1111-111111111111"], + data["mappings"], + ) # test un-nested PageChooserBlock - self.assertIn(['wagtailcore.page', 2, "22222222-2222-2222-2222-222222222222"], data['mappings']) + self.assertIn( + ["wagtailcore.page", 2, "22222222-2222-2222-2222-222222222222"], + data["mappings"], + ) # test PageChooserBlock in StreamBlock - self.assertIn(['wagtailcore.page', 3, "33333333-3333-3333-3333-333333333333"], data['mappings']) + self.assertIn( + ["wagtailcore.page", 3, "33333333-3333-3333-3333-333333333333"], + data["mappings"], + ) # test PageChooserBlock in ListBlock - self.assertIn(['wagtailcore.page', 5, "00017017-5555-5555-5555-555555555555"], data['mappings']) + self.assertIn( + ["wagtailcore.page", 5, "00017017-5555-5555-5555-555555555555"], + data["mappings"], + ) def test_streamfield_with_rich_text(self): # Check that page references within a RichTextBlock in StreamField are found correctly - page = PageWithStreamField(title="My streamfield rich text block has a link", - body=json.dumps([{'type': 'rich_text', - 'value': '

I link to a page.

', - 'id': '7d4ee3d4-9213-4319-b984-45be4ded8853'}])) + page = PageWithStreamField( + title="My streamfield rich text block has a link", + body=json.dumps( + [ + { + "type": "rich_text", + "value": '

I link to a page.

', + "id": "7d4ee3d4-9213-4319-b984-45be4ded8853", + } + ] + ), + ) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) - digest = digest_for_source('local', str(page.id)) - response = self.client.get('/wagtail-transfer/api/pages/%d/?digest=%s' % (page.id, digest)) + digest = digest_for_source("local", str(page.id)) + response = self.client.get( + "/wagtail-transfer/api/pages/%d/?digest=%s" % (page.id, digest) + ) data = json.loads(response.content) - self.assertIn(['wagtailcore.page', 1, '11111111-1111-1111-1111-111111111111'], data['mappings']) + self.assertIn( + ["wagtailcore.page", 1, "11111111-1111-1111-1111-111111111111"], + data["mappings"], + ) def test_streamfield_with_dead_page_link(self): page = PageWithStreamField( title="I have a streamfield", - body=json.dumps([ - {'type': 'link_block', 'value': {'page': 999, 'text': 'Test'}, 'id': 'fc3b0d3d-d316-4271-9e31-84919558188a'}, - ]) + body=json.dumps( + [ + { + "type": "link_block", + "value": {"page": 999, "text": "Test"}, + "id": "fc3b0d3d-d316-4271-9e31-84919558188a", + }, + ] + ), ) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) - digest = digest_for_source('local', str(page.id)) - response = self.client.get('/wagtail-transfer/api/pages/%d/?digest=%s' % (page.id, digest)) + digest = digest_for_source("local", str(page.id)) + response = self.client.get( + "/wagtail-transfer/api/pages/%d/?digest=%s" % (page.id, digest) + ) data = json.loads(response.content) - self.assertTrue(any( - model == 'wagtailcore.page' and id == 999 - for model, id, uid in data['mappings'] - )) + self.assertTrue( + any( + model == "wagtailcore.page" and id == 999 + for model, id, uid in data["mappings"] + ) + ) def test_streamfield_with_null_page(self): # We should 
gracefully handle null values in non-required chooser blocks page = PageWithStreamField( title="I have a streamfield", - body=json.dumps([{ - 'type': 'link_block', - 'value': {'page': None, 'text': 'Empty test'}, - 'id': 'fc3b0d3d-d316-4271-9e31-84919558188a' - },]) + body=json.dumps( + [ + { + "type": "link_block", + "value": {"page": None, "text": "Empty test"}, + "id": "fc3b0d3d-d316-4271-9e31-84919558188a", + }, + ] + ), ) - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) - digest = digest_for_source('local', str(page.id)) - response = self.client.get('/wagtail-transfer/api/pages/%d/?digest=%s' % (page.id, digest)) + digest = digest_for_source("local", str(page.id)) + response = self.client.get( + "/wagtail-transfer/api/pages/%d/?digest=%s" % (page.id, digest) + ) data = json.loads(response.content) # result should have a mapping for the page we just created, and its parent - page_mappings = filter(lambda mapping: mapping[0] == 'wagtailcore.page', data['mappings']) + page_mappings = filter( + lambda mapping: mapping[0] == "wagtailcore.page", data["mappings"] + ) self.assertEqual(len(list(page_mappings)), 2) def test_parental_many_to_many(self): @@ -437,35 +544,43 @@ def test_parental_many_to_many(self): advert_3 = Advert.objects.get(id=3) page.ads = [advert_2, advert_3] - parent_page = Page.objects.get(url_path='/home/existing-child-page/') + parent_page = Page.objects.get(url_path="/home/existing-child-page/") parent_page.add_child(instance=page) - digest = digest_for_source('local', str(page.id)) - response = self.client.get('/wagtail-transfer/api/pages/%d/?digest=%s' % (page.id, digest)) + digest = digest_for_source("local", str(page.id)) + response = self.client.get( + "/wagtail-transfer/api/pages/%d/?digest=%s" % (page.id, digest) + ) data = json.loads(response.content) - self.assertIn(['tests.advert', 2, 
"adadadad-2222-2222-2222-222222222222"], data['mappings']) - self.assertIn(['tests.advert', 3, "adadadad-3333-3333-3333-333333333333"], data['mappings']) - self.assertEqual({2, 3}, set(data['objects'][0]['fields']['ads'])) + self.assertIn( + ["tests.advert", 2, "adadadad-2222-2222-2222-222222222222"], + data["mappings"], + ) + self.assertIn( + ["tests.advert", 3, "adadadad-3333-3333-3333-333333333333"], + data["mappings"], + ) + self.assertEqual({2, 3}, set(data["objects"][0]["fields"]["ads"])) def test_related_model_with_field_lookup(self): page = SponsoredPage.objects.get(id=5) - page.categories.add(Category.objects.get(name='Cars')) + page.categories.add(Category.objects.get(name="Cars")) page.save() response = self.get(5) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - mappings = data['mappings'] + mappings = data["mappings"] # Category objects in the mappings section should be identified by name, not UUID - self.assertIn(['tests.category', 1, ['Cars']], mappings) + self.assertIn(["tests.category", 1, ["Cars"]], mappings) class TestObjectsApi(TestCase): - fixtures = ['test.json'] + fixtures = ["test.json"] def setUp(self): shutil.rmtree(TEST_MEDIA_DIR, ignore_errors=True) @@ -474,40 +589,49 @@ def tearDown(self): shutil.rmtree(TEST_MEDIA_DIR, ignore_errors=True) def test_incorrect_digest(self): - request_body = json.dumps({ - 'tests.advert': [1] - }) + request_body = json.dumps({"tests.advert": [1]}) response = self.client.post( - '/wagtail-transfer/api/objects/?digest=12345678', request_body, content_type='application/json' + "/wagtail-transfer/api/objects/?digest=12345678", + request_body, + content_type="application/json", ) self.assertEqual(response.status_code, 403) def get(self, request_body): request_json = json.dumps(request_body) - digest = digest_for_source('local', request_json) + digest = digest_for_source("local", request_json) return self.client.post( - '/wagtail-transfer/api/objects/?digest=%s' % digest, 
request_json, content_type='application/json' + "/wagtail-transfer/api/objects/?digest=%s" % digest, + request_json, + content_type="application/json", ) def test_objects_api(self): - response = self.get({ - 'tests.advert': [1] - }) + response = self.get({"tests.advert": [1]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertEqual(data['ids_for_import'], []) - self.assertEqual(data['objects'][0]['model'], 'tests.advert') - self.assertEqual(data['objects'][0]['fields']['slogan'], "put a tiger in your tank") - self.assertEqual(data['objects'][0]['fields']['run_until'], "2020-12-23T21:00:00Z") - self.assertEqual(data['objects'][0]['fields']['run_from'], None) + self.assertEqual(data["ids_for_import"], []) + self.assertEqual(data["objects"][0]["model"], "tests.advert") + self.assertEqual( + data["objects"][0]["fields"]["slogan"], "put a tiger in your tank" + ) + self.assertEqual( + data["objects"][0]["fields"]["run_until"], "2020-12-23T21:00:00Z" + ) + self.assertEqual(data["objects"][0]["fields"]["run_from"], None) - self.assertEqual(data['mappings'], [['tests.advert', 1, 'adadadad-1111-1111-1111-111111111111']]) + self.assertEqual( + data["mappings"], + [["tests.advert", 1, "adadadad-1111-1111-1111-111111111111"]], + ) def test_objects_api_with_tree_model(self): root_collection = Collection.objects.get() - collection = root_collection.add_child(instance=Collection(name="Test collection")) + collection = root_collection.add_child( + instance=Collection(name="Test collection") + ) collection_uid = uuid.uuid4() collection_content_type = ContentType.objects.get_for_model(Collection) @@ -518,27 +642,25 @@ def test_objects_api_with_tree_model(self): uid=collection_uid, ) - response = self.get({ - 'wagtailcore.collection': [collection.id] - }) + response = self.get({"wagtailcore.collection": [collection.id]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertEqual(data['ids_for_import'], 
[]) - self.assertEqual(data['objects'][0]['model'], 'wagtailcore.collection') - self.assertEqual(data['objects'][0]['fields']['name'], "Test collection") + self.assertEqual(data["ids_for_import"], []) + self.assertEqual(data["objects"][0]["model"], "wagtailcore.collection") + self.assertEqual(data["objects"][0]["fields"]["name"], "Test collection") # mappings should contain entries for the requested collection and its parent self.assertIn( - ['wagtailcore.collection', collection.id, str(collection_uid)], - data['mappings'] + ["wagtailcore.collection", collection.id, str(collection_uid)], + data["mappings"], ) root_collection_uid = IDMapping.objects.get( content_type=collection_content_type, local_id=root_collection.id ).uid self.assertIn( - ['wagtailcore.collection', root_collection.id, str(root_collection_uid)], - data['mappings'] + ["wagtailcore.collection", root_collection.id, str(root_collection_uid)], + data["mappings"], ) def test_many_to_many(self): @@ -548,161 +670,180 @@ def test_many_to_many(self): ad_holder.ads.set([advert_2, advert_3]) ad_holder.save() - response = self.get({ - 'tests.modelwithmanytomany': [1] - }) + response = self.get({"tests.modelwithmanytomany": [1]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertIn(['tests.advert', 2, "adadadad-2222-2222-2222-222222222222"], data['mappings']) - self.assertIn(['tests.advert', 3, "adadadad-3333-3333-3333-333333333333"], data['mappings']) - self.assertEqual({2, 3}, set(data['objects'][0]['fields']['ads'])) + self.assertIn( + ["tests.advert", 2, "adadadad-2222-2222-2222-222222222222"], + data["mappings"], + ) + self.assertIn( + ["tests.advert", 3, "adadadad-3333-3333-3333-333333333333"], + data["mappings"], + ) + self.assertEqual({2, 3}, set(data["objects"][0]["fields"]["ads"])) def test_model_with_field_lookup(self): - response = self.get({ - 'tests.category': [1] - }) + response = self.get({"tests.category": [1]}) self.assertEqual(response.status_code, 
200) data = json.loads(response.content) # Category objects in the mappings section should be identified by name, not UUID - self.assertIn(['tests.category', 1, ['Cars']], data['mappings']) + self.assertIn(["tests.category", 1, ["Cars"]], data["mappings"]) def test_model_with_multi_table_inheritance(self): # LongAdvert inherits from Advert. Fetching the base instance over the objects api should # return a LongAdvert model - long_ad = LongAdvert.objects.create(slogan='test', run_until=datetime.now(timezone.utc), description='longertest') + long_ad = LongAdvert.objects.create( + slogan="test", + run_until=datetime.now(timezone.utc), + description="longertest", + ) - response = self.get({ - 'tests.advert': [long_ad.pk] - }) + response = self.get({"tests.advert": [long_ad.pk]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertEqual(data['mappings'][0][0], 'tests.advert') + self.assertEqual(data["mappings"][0][0], "tests.advert") # mappings should be for the base object - self.assertEqual(data['objects'][0]['model'], 'tests.longadvert') + self.assertEqual(data["objects"][0]["model"], "tests.longadvert") # the child object should be serialized def test_model_with_tags(self): # test that a reverse relation such as tagged_items is followed to obtain references to the # tagged_items, if the model and relationship are specified in WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS - ad = Advert.objects.create(slogan='test', run_until=datetime.now(timezone.utc)) - ad.tags.add('test_tag') + ad = Advert.objects.create(slogan="test", run_until=datetime.now(timezone.utc)) + ad.tags.add("test_tag") - response = self.get({ - 'tests.advert': [ad.pk] - }) + response = self.get({"tests.advert": [ad.pk]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - mapped_models = {mapping[0] for mapping in data['mappings']} - self.assertIn('taggit.taggeditem', mapped_models) + mapped_models = {mapping[0] for mapping in 
data["mappings"]} + self.assertIn("taggit.taggeditem", mapped_models) def test_image(self): - with open(os.path.join(FIXTURES_DIR, 'wagtail.jpg'), 'rb') as f: + with open(os.path.join(FIXTURES_DIR, "wagtail.jpg"), "rb") as f: image = Image.objects.create( - title="Wagtail", - file=ImageFile(f, name='wagtail.jpg') + title="Wagtail", file=ImageFile(f, name="wagtail.jpg") ) - response = self.get({ - 'wagtailimages.image': [image.pk] - }) + response = self.get({"wagtailimages.image": [image.pk]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertEqual(len(data['objects']), 1) - obj = data['objects'][0] - self.assertEqual(obj['fields']['file']['download_url'], 'http://media.example.com/media/original_images/wagtail.jpg') - self.assertEqual(obj['fields']['file']['size'], 1160) - self.assertEqual(obj['fields']['file']['hash'], '45c5db99aea04378498883b008ee07528f5ae416') + self.assertEqual(len(data["objects"]), 1) + obj = data["objects"][0] + self.assertEqual( + obj["fields"]["file"]["download_url"], + "http://media.example.com/media/original_images/wagtail.jpg", + ) + self.assertEqual(obj["fields"]["file"]["size"], 1160) + self.assertEqual( + obj["fields"]["file"]["hash"], "45c5db99aea04378498883b008ee07528f5ae416" + ) - @override_settings(MEDIA_URL='/media/') + @override_settings(MEDIA_URL="/media/") def test_image_with_local_media_url(self): """File URLs should use BASE_URL to form an absolute URL if MEDIA_URL is relative""" - with open(os.path.join(FIXTURES_DIR, 'wagtail.jpg'), 'rb') as f: + with open(os.path.join(FIXTURES_DIR, "wagtail.jpg"), "rb") as f: image = Image.objects.create( - title="Wagtail", - file=ImageFile(f, name='wagtail.jpg') + title="Wagtail", file=ImageFile(f, name="wagtail.jpg") ) - response = self.get({ - 'wagtailimages.image': [image.pk] - }) + response = self.get({"wagtailimages.image": [image.pk]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - 
self.assertEqual(len(data['objects']), 1) - obj = data['objects'][0] - self.assertEqual(obj['fields']['file']['download_url'], 'http://example.com/media/original_images/wagtail.jpg') - self.assertEqual(obj['fields']['file']['size'], 1160) - self.assertEqual(obj['fields']['file']['hash'], '45c5db99aea04378498883b008ee07528f5ae416') + self.assertEqual(len(data["objects"]), 1) + obj = data["objects"][0] + self.assertEqual( + obj["fields"]["file"]["download_url"], + "http://example.com/media/original_images/wagtail.jpg", + ) + self.assertEqual(obj["fields"]["file"]["size"], 1160) + self.assertEqual( + obj["fields"]["file"]["hash"], "45c5db99aea04378498883b008ee07528f5ae416" + ) def test_document(self): - with open(os.path.join(FIXTURES_DIR, 'document.txt'), 'rb') as f: + with open(os.path.join(FIXTURES_DIR, "document.txt"), "rb") as f: document = Document.objects.create( - title="Test document", - file=File(f, name='document.txt') + title="Test document", file=File(f, name="document.txt") ) - response = self.get({ - 'wagtaildocs.document': [document.pk] - }) + response = self.get({"wagtaildocs.document": [document.pk]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertEqual(len(data['objects']), 1) - obj = data['objects'][0] - self.assertEqual(obj['fields']['file']['download_url'], 'http://media.example.com/media/documents/document.txt') - self.assertEqual(obj['fields']['file']['size'], 33) - self.assertEqual(obj['fields']['file']['hash'], '9b90daf19b6e1e8a4852c64f9ea7fec5bcc5f7fb') + self.assertEqual(len(data["objects"]), 1) + obj = data["objects"][0] + self.assertEqual( + obj["fields"]["file"]["download_url"], + "http://media.example.com/media/documents/document.txt", + ) + self.assertEqual(obj["fields"]["file"]["size"], 33) + self.assertEqual( + obj["fields"]["file"]["hash"], "9b90daf19b6e1e8a4852c64f9ea7fec5bcc5f7fb" + ) def test_custom_model_with_file_field(self): - with open(os.path.join(FIXTURES_DIR, 'wagtail.jpg'), 
'rb') as f: - avatar = Avatar.objects.create( - image=ImageFile(f, name='wagtail.jpg') - ) + with open(os.path.join(FIXTURES_DIR, "wagtail.jpg"), "rb") as f: + avatar = Avatar.objects.create(image=ImageFile(f, name="wagtail.jpg")) - response = self.get({ - 'tests.avatar': [avatar.pk] - }) + response = self.get({"tests.avatar": [avatar.pk]}) self.assertEqual(response.status_code, 200) data = json.loads(response.content) - self.assertEqual(len(data['objects']), 1) - obj = data['objects'][0] - self.assertEqual(obj['fields']['image']['download_url'], 'http://media.example.com/media/avatars/wagtail.jpg') - self.assertEqual(obj['fields']['image']['size'], 1160) - self.assertEqual(obj['fields']['image']['hash'], '45c5db99aea04378498883b008ee07528f5ae416') + self.assertEqual(len(data["objects"]), 1) + obj = data["objects"][0] + self.assertEqual( + obj["fields"]["image"]["download_url"], + "http://media.example.com/media/avatars/wagtail.jpg", + ) + self.assertEqual(obj["fields"]["image"]["size"], 1160) + self.assertEqual( + obj["fields"]["image"]["hash"], "45c5db99aea04378498883b008ee07528f5ae416" + ) -@mock.patch('requests.get') +@mock.patch("requests.get") class TestChooserProxyApi(TestCase): - fixtures = ['test.json'] + fixtures = ["test.json"] def setUp(self): - self.client.login(username='admin', password='password') + self.client.login(username="admin", password="password") def test(self, get): get.return_value.status_code = 200 - get.return_value.content = b'test content' + get.return_value.content = b"test content" - response = self.client.get('/admin/wagtail-transfer/api/chooser-proxy/staging/foo?bar=baz', HTTP_ACCEPT='application/json') + response = self.client.get( + "/admin/wagtail-transfer/api/chooser-proxy/staging/foo?bar=baz", + HTTP_ACCEPT="application/json", + ) - digest = digest_for_source('staging', 'bar=baz') + digest = digest_for_source("staging", "bar=baz") - 
get.assert_called_once_with(f'https://www.example.com/wagtail-transfer/api/chooser/pages/foo?bar=baz&digest={digest}', headers={'Accept': 'application/json'}, timeout=5) + get.assert_called_once_with( + f"https://www.example.com/wagtail-transfer/api/chooser/pages/foo?bar=baz&digest={digest}", + headers={"Accept": "application/json"}, + timeout=5, + ) self.assertEqual(response.status_code, 200) - self.assertEqual(response.content, b'test content') + self.assertEqual(response.content, b"test content") def test_with_unknown_source(self, get): get.return_value.status_code = 200 - get.return_value.content = b'test content' + get.return_value.content = b"test content" - response = self.client.get('/admin/wagtail-transfer/api/chooser-proxy/production/foo?bar=baz', HTTP_ACCEPT='application/json') + response = self.client.get( + "/admin/wagtail-transfer/api/chooser-proxy/production/foo?bar=baz", + HTTP_ACCEPT="application/json", + ) get.assert_not_called() diff --git a/tests/tests/test_import.py b/tests/tests/test_import.py index 2cfffa2..a5448c7 100644 --- a/tests/tests/test_import.py +++ b/tests/tests/test_import.py @@ -34,12 +34,12 @@ # We could use settings.MEDIA_ROOT here, but this way we avoid clobbering a real media folder if we # ever run these tests with non-test settings for any reason -TEST_MEDIA_DIR = os.path.join(os.path.join(settings.BASE_DIR, 'test-media')) -FIXTURES_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'fixtures') +TEST_MEDIA_DIR = os.path.join(os.path.join(settings.BASE_DIR, "test-media")) +FIXTURES_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "fixtures") class TestImport(TestCase): - fixtures = ['test.json'] + fixtures = ["test.json"] def setUp(self): shutil.rmtree(TEST_MEDIA_DIR, ignore_errors=True) @@ -75,10 +75,9 @@ def test_import_model(self): cats = Category.objects.all() self.assertEqual(cats.count(), 2) - def test_import_pages(self): # make a draft edit to the homepage - home = 
SimplePage.objects.get(slug='home') + home = SimplePage.objects.get(slug="home") home.title = "Draft home" home.save_revision() @@ -125,7 +124,7 @@ def test_import_pages(self): importer.add_json(data) importer.run() - updated_page = SimplePage.objects.get(url_path='/home/') + updated_page = SimplePage.objects.get(url_path="/home/") self.assertEqual(updated_page.intro, "This is the updated homepage") self.assertEqual(updated_page.title, "New home") self.assertEqual(updated_page.draft_title, "New home") @@ -135,12 +134,16 @@ def test_import_pages(self): self.assertEqual(updated_page_revision.intro, "This is the updated homepage") self.assertEqual(updated_page_revision.title, "New home") - created_page = SimplePage.objects.get(url_path='/home/imported-child-page/') - self.assertEqual(created_page.intro, "This page is imported from the source site") + created_page = SimplePage.objects.get(url_path="/home/imported-child-page/") + self.assertEqual( + created_page.intro, "This page is imported from the source site" + ) # An initial page revision should also be created self.assertTrue(created_page.get_latest_revision()) created_page_revision = created_page.get_latest_revision_as_object() - self.assertEqual(created_page_revision.intro, "This page is imported from the source site") + self.assertEqual( + created_page_revision.intro, "This page is imported from the source site" + ) def test_import_pages_with_fk(self): data = """{ @@ -235,19 +238,25 @@ def test_import_pages_with_fk(self): importer.add_json(data) importer.run() - updated_page = SponsoredPage.objects.get(url_path='/home/oil-is-still-great/') + updated_page = SponsoredPage.objects.get(url_path="/home/oil-is-still-great/") self.assertEqual(updated_page.intro, "yay fossil fuels and climate change") # advert is listed in WAGTAILTRANSFER_UPDATE_RELATED_MODELS, so changes to the advert should have been pulled in too self.assertEqual(updated_page.advert.slogan, "put a leopard in your tank") - 
self.assertEqual(updated_page.advert.run_until, datetime(2020, 12, 23, 21, 5, 43, tzinfo=timezone.utc)) + self.assertEqual( + updated_page.advert.run_until, + datetime(2020, 12, 23, 21, 5, 43, tzinfo=timezone.utc), + ) self.assertEqual(updated_page.advert.run_from, None) # author is not listed in WAGTAILTRANSFER_UPDATE_RELATED_MODELS, so should be left unchanged self.assertEqual(updated_page.author.bio, "Jack Kerouac's car has broken down.") - created_page = SponsoredPage.objects.get(url_path='/home/eggs-are-great-too/') + created_page = SponsoredPage.objects.get(url_path="/home/eggs-are-great-too/") self.assertEqual(created_page.intro, "you can make cakes with them") self.assertEqual(created_page.advert.slogan, "go to work on an egg") - self.assertEqual(created_page.advert.run_until, datetime(2020, 12, 23, 1, 23, 45, tzinfo=timezone.utc)) + self.assertEqual( + created_page.advert.run_until, + datetime(2020, 12, 23, 1, 23, 45, tzinfo=timezone.utc), + ) self.assertEqual(created_page.advert.run_from, None) def test_import_pages_with_orphaned_uid(self): @@ -304,18 +313,22 @@ def test_import_pages_with_orphaned_uid(self): importer.add_json(data) importer.run() - updated_page = SponsoredPage.objects.get(url_path='/home/oil-is-still-great/') + updated_page = SponsoredPage.objects.get(url_path="/home/oil-is-still-great/") # author should be recreated self.assertEqual(updated_page.author.name, "Edgar Allen Poe") - self.assertEqual(updated_page.author.bio, "Edgar Allen Poe has come back from the dead") + self.assertEqual( + updated_page.author.bio, "Edgar Allen Poe has come back from the dead" + ) # make sure it has't just overwritten the old author... 
self.assertTrue(Author.objects.filter(name="Jack Kerouac").exists()) # there should now be an IDMapping record for the previously orphaned UID, pointing to the # newly created author self.assertEqual( - IDMapping.objects.get(uid="b00cb00c-0000-0000-0000-00000de1e7ed").content_object, - updated_page.author + IDMapping.objects.get( + uid="b00cb00c-0000-0000-0000-00000de1e7ed" + ).content_object, + updated_page.author, ) def test_import_page_with_child_models(self): @@ -370,7 +383,7 @@ def test_import_page_with_child_models(self): importer.add_json(data) importer.run() - page = SectionedPage.objects.get(url_path='/home/how-to-boil-an-egg/') + page = SectionedPage.objects.get(url_path="/home/how-to-boil-an-egg/") self.assertEqual(page.sections.count(), 2) self.assertEqual(page.sections.first().title, "Boil the outside of the egg") @@ -499,7 +512,7 @@ def test_import_page_with_comments(self): importer.add_json(data) importer.run() - page = SimplePage.objects.get(url_path='/home/how-to-boil-an-egg/') + page = SimplePage.objects.get(url_path="/home/how-to-boil-an-egg/") self.assertEqual(page.wagtail_admin_comments.count(), 1) @@ -543,7 +556,9 @@ def test_import_page_with_rich_text_link(self): page = PageWithRichText.objects.get(slug="imported-rich-text-page") # tests that a page link id is changed successfully when imported - self.assertEqual(page.body, '

But I have a link

') + self.assertEqual( + page.body, '

But I have a link

' + ) # TODO: this should include an embed type as well once document/image import is added @@ -617,7 +632,7 @@ def test_do_not_import_pages_outside_of_selected_root(self): page = PageWithRichText.objects.get(slug="imported-rich-text-page") # tests that the page link tag is removed, as the page does not exist on the destination - self.assertEqual(page.body, '

But I have a link

') + self.assertEqual(page.body, "

But I have a link

") def test_import_page_with_streamfield_page_links(self): data = """{ @@ -657,7 +672,37 @@ def test_import_page_with_streamfield_page_links(self): imported_streamfield = page.body.stream_block.get_prep_value(page.body) # Check that PageChooserBlock ids are converted correctly to those on the destination site - self.assertEqual(imported_streamfield, [{'type': 'link_block', 'value': {'page': 1, 'text': 'Test'}, 'id': 'fc3b0d3d-d316-4271-9e31-84919558188a'}, {'type': 'page', 'value': 2, 'id': 'c6d07d3a-72d4-445e-8fa5-b34107291176'}, {'type': 'stream', 'value': [{'type': 'page', 'value': 3, 'id': '8c0d7de7-4f77-4477-be67-7d990d0bfb82'}], 'id': '21ffe52a-c0fc-4ecc-92f1-17b356c9cc94'}, {'type': 'list_of_pages', 'value': [5], 'id': '17b972cb-a952-4940-87e2-e4eb00703997'}]) + self.assertEqual( + imported_streamfield, + [ + { + "type": "link_block", + "value": {"page": 1, "text": "Test"}, + "id": "fc3b0d3d-d316-4271-9e31-84919558188a", + }, + { + "type": "page", + "value": 2, + "id": "c6d07d3a-72d4-445e-8fa5-b34107291176", + }, + { + "type": "stream", + "value": [ + { + "type": "page", + "value": 3, + "id": "8c0d7de7-4f77-4477-be67-7d990d0bfb82", + } + ], + "id": "21ffe52a-c0fc-4ecc-92f1-17b356c9cc94", + }, + { + "type": "list_of_pages", + "value": [5], + "id": "17b972cb-a952-4940-87e2-e4eb00703997", + }, + ], + ) def test_import_page_with_document_chooser_block(self): data = """{ @@ -699,14 +744,16 @@ def test_import_page_with_document_chooser_block(self): imported_streamfield, [ { - 'id': '17b972cb-a952-4940-87e2-e4eb00703997', - 'type': 'document', - 'value': 1, + "id": "17b972cb-a952-4940-87e2-e4eb00703997", + "type": "document", + "value": 1, }, ], ) - def test_import_page_with_streamfield_page_links_where_linked_pages_not_imported(self): + def test_import_page_with_streamfield_page_links_where_linked_pages_not_imported( + self + ): data = """{ "ids_for_import": [ ["wagtailcore.page", 6] @@ -744,17 +791,44 @@ def 
test_import_page_with_streamfield_page_links_where_linked_pages_not_imported imported_streamfield = page.body.stream_block.get_prep_value(page.body) # The PageChooserBlock has required=True, so when its value is removed, the block should also be removed - self.assertNotIn({'type': 'page', 'value': None, 'id': 'c6d07d3a-72d4-445e-8fa5-b34107291176'}, imported_streamfield) + self.assertNotIn( + { + "type": "page", + "value": None, + "id": "c6d07d3a-72d4-445e-8fa5-b34107291176", + }, + imported_streamfield, + ) # Test that 0 values are not removed, only None - self.assertIn({'type': 'integer', 'value': 0, 'id': 'aad07d3a-72d4-445e-8fa5-b34107291199'}, imported_streamfield) + self.assertIn( + { + "type": "integer", + "value": 0, + "id": "aad07d3a-72d4-445e-8fa5-b34107291199", + }, + imported_streamfield, + ) # By contrast, the PageChooserBlock in the link_block has required=False, so just the block's value should be removed instead - self.assertIn({'type': 'link_block', 'value': {'page': None, 'text': 'Test'}, 'id': 'fc3b0d3d-d316-4271-9e31-84919558188a'}, imported_streamfield) + self.assertIn( + { + "type": "link_block", + "value": {"page": None, "text": "Test"}, + "id": "fc3b0d3d-d316-4271-9e31-84919558188a", + }, + imported_streamfield, + ) # The ListBlock should now be empty, as the (required) PageChooserBlocks inside have had their values set to None - self.assertIn({'type': 'list_of_pages', 'value': [], 'id': '17b972cb-a952-4940-87e2-e4eb00703997'}, imported_streamfield) - + self.assertIn( + { + "type": "list_of_pages", + "value": [], + "id": "17b972cb-a952-4940-87e2-e4eb00703997", + }, + imported_streamfield, + ) def test_import_page_with_streamfield_rich_text_block(self): # Check that ids in RichTextBlock within a StreamField are converted properly @@ -764,11 +838,22 @@ def test_import_page_with_streamfield_rich_text_block(self): importer.add_json(data) importer.run() - page = PageWithStreamField.objects.get(slug="my-streamfield-rich-text-block-has-a-link") + 
page = PageWithStreamField.objects.get( + slug="my-streamfield-rich-text-block-has-a-link" + ) imported_streamfield = page.body.stream_block.get_prep_value(page.body) - self.assertEqual(imported_streamfield, [{'type': 'rich_text', 'value': '

I link to a page.

', 'id': '7d4ee3d4-9213-4319-b984-45be4ded8853'}]) + self.assertEqual( + imported_streamfield, + [ + { + "type": "rich_text", + "value": '

I link to a page.

', + "id": "7d4ee3d4-9213-4319-b984-45be4ded8853", + } + ], + ) def test_import_page_with_new_list_block_format(self): # Check that ids in a ListBlock with the uuid format within a StreamField are converted properly @@ -781,19 +866,34 @@ def test_import_page_with_new_list_block_format(self): imported_streamfield = page.body.stream_block.get_prep_value(page.body) - self.assertEqual(imported_streamfield, [{'type': 'list_of_captioned_pages', 'value': [{'type': 'item', 'value': {'page': 1, 'text': 'a caption'}, 'id': '8c0d7de7-4f77-4477-be67-7d990d0bfb82'}], 'id': '21ffe52a-c0fc-4ecc-92f1-17b356c9cc94'}]) + self.assertEqual( + imported_streamfield, + [ + { + "type": "list_of_captioned_pages", + "value": [ + { + "type": "item", + "value": {"page": 1, "text": "a caption"}, + "id": "8c0d7de7-4f77-4477-be67-7d990d0bfb82", + } + ], + "id": "21ffe52a-c0fc-4ecc-92f1-17b356c9cc94", + } + ], + ) - @mock.patch('requests.get') + @mock.patch("requests.get") def test_import_image_with_file(self, get): get.return_value.status_code = 200 - get.return_value.content = b'my test image file contents' + get.return_value.content = b"my test image file contents" IDMapping.objects.get_or_create( uid="f91cb31c-1751-11ea-8000-0800278dc04d", defaults={ - 'content_type': ContentType.objects.get_for_model(Collection), - 'local_id': Collection.objects.get().id, - } + "content_type": ContentType.objects.get_for_model(Collection), + "local_id": Collection.objects.get().id, + }, ) data = """{ @@ -849,16 +949,16 @@ def test_import_image_with_file(self, get): get.assert_called() image = Image.objects.get() self.assertEqual(image.title, "Lightnin' Hopkins") - self.assertEqual(image.file.read(), b'my test image file contents') + self.assertEqual(image.file.read(), b"my test image file contents") # TODO: We should verify these self.assertEqual(image.file_size, 18521) self.assertEqual(image.file_hash, "e4eab12cc50b6b9c619c9ddd20b61d8e6a961ada") - @mock.patch('requests.get') + @mock.patch("requests.get") 
def test_import_image_with_file_without_root_collection_mapping(self, get): get.return_value.status_code = 200 - get.return_value.content = b'my test image file contents' + get.return_value.content = b"my test image file contents" data = """{ "ids_for_import": [ @@ -913,7 +1013,7 @@ def test_import_image_with_file_without_root_collection_mapping(self, get): get.assert_called() image = Image.objects.get() self.assertEqual(image.title, "Lightnin' Hopkins") - self.assertEqual(image.file.read(), b'my test image file contents') + self.assertEqual(image.file.read(), b"my test image file contents") # It should be in the existing root collection (no new collection should be created) self.assertEqual(image.collection.name, "Root") @@ -923,7 +1023,7 @@ def test_import_image_with_file_without_root_collection_mapping(self, get): self.assertEqual(image.file_size, 18521) self.assertEqual(image.file_hash, "e4eab12cc50b6b9c619c9ddd20b61d8e6a961ada") - @mock.patch('requests.get') + @mock.patch("requests.get") def test_existing_image_is_not_refetched(self, get): """ If an incoming object has a FileField that reports the same size/hash as the existing @@ -931,20 +1031,19 @@ def test_existing_image_is_not_refetched(self, get): """ get.return_value.status_code = 200 - get.return_value.content = b'my test image file contents' + get.return_value.content = b"my test image file contents" - with open(os.path.join(FIXTURES_DIR, 'wagtail.jpg'), 'rb') as f: + with open(os.path.join(FIXTURES_DIR, "wagtail.jpg"), "rb") as f: image = Image.objects.create( - title="Wagtail", - file=ImageFile(f, name='wagtail.jpg') + title="Wagtail", file=ImageFile(f, name="wagtail.jpg") ) IDMapping.objects.get_or_create( uid="f91debc6-1751-11ea-8001-0800278dc04d", defaults={ - 'content_type': ContentType.objects.get_for_model(Image), - 'local_id': image.id, - } + "content_type": ContentType.objects.get_for_model(Image), + "local_id": image.id, + }, ) data = """{ @@ -1003,7 +1102,7 @@ def 
test_existing_image_is_not_refetched(self, get): # but file is left alone (i.e. it has not been replaced with 'my test image file contents') self.assertEqual(image.file.size, 1160) - @mock.patch('requests.get') + @mock.patch("requests.get") def test_replace_image(self, get): """ If an incoming object has a FileField that reports a different size/hash to the existing @@ -1011,20 +1110,19 @@ def test_replace_image(self, get): """ get.return_value.status_code = 200 - get.return_value.content = b'my test image file contents' + get.return_value.content = b"my test image file contents" - with open(os.path.join(FIXTURES_DIR, 'wagtail.jpg'), 'rb') as f: + with open(os.path.join(FIXTURES_DIR, "wagtail.jpg"), "rb") as f: image = Image.objects.create( - title="Wagtail", - file=ImageFile(f, name='wagtail.jpg') + title="Wagtail", file=ImageFile(f, name="wagtail.jpg") ) IDMapping.objects.get_or_create( uid="f91debc6-1751-11ea-8001-0800278dc04d", defaults={ - 'content_type': ContentType.objects.get_for_model(Image), - 'local_id': image.id, - } + "content_type": ContentType.objects.get_for_model(Image), + "local_id": image.id, + }, ) data = """{ @@ -1079,7 +1177,7 @@ def test_replace_image(self, get): get.assert_called() image = Image.objects.get() self.assertEqual(image.title, "A lovely wagtail") - self.assertEqual(image.file.read(), b'my test image file contents') + self.assertEqual(image.file.read(), b"my test image file contents") def test_import_collection(self): root_collection = Collection.objects.get() @@ -1087,17 +1185,20 @@ def test_import_collection(self): IDMapping.objects.get_or_create( uid="f91cb31c-1751-11ea-8000-0800278dc04d", defaults={ - 'content_type': ContentType.objects.get_for_model(Collection), - 'local_id': root_collection.id, - } + "content_type": ContentType.objects.get_for_model(Collection), + "local_id": root_collection.id, + }, ) - data = """{ + data = ( + """{ "ids_for_import": [ ["wagtailcore.collection", 4] ], "mappings": [ - 
["wagtailcore.collection", """ + str(root_collection.id) + """, "f91cb31c-1751-11ea-8000-0800278dc04d"], + ["wagtailcore.collection", """ + + str(root_collection.id) + + """, "f91cb31c-1751-11ea-8000-0800278dc04d"], ["wagtailcore.collection", 4, "8a1d3afd-3fa2-4309-9dc7-6d31902174ca"] ], "objects": [ @@ -1107,10 +1208,13 @@ def test_import_collection(self): "fields": { "name": "New collection" }, - "parent_id": """ + str(root_collection.id) + """ + "parent_id": """ + + str(root_collection.id) + + """ } ] }""" + ) importer = ImportPlanner(root_page_source_pk=1, destination_parent_id=None) importer.add_json(data) @@ -1196,8 +1300,13 @@ def test_import_page_with_parental_many_to_many(self): self.assertEqual(set(page.ads.all()), {advert_2, advert_3}) # advert is listed in WAGTAILTRANSFER_UPDATE_RELATED_MODELS, so changes to the advert should have been pulled in too - self.assertEqual(advert_3.slogan, "Buy a half-scale authentically hydrogen-filled replica of the Hindenburg!") - self.assertEqual(advert_3.run_until, datetime(1937, 5, 6, 23, 25, 12, tzinfo=timezone.utc)) + self.assertEqual( + advert_3.slogan, + "Buy a half-scale authentically hydrogen-filled replica of the Hindenburg!", + ) + self.assertEqual( + advert_3.run_until, datetime(1937, 5, 6, 23, 25, 12, tzinfo=timezone.utc) + ) self.assertEqual(advert_3.run_from, None) def test_import_object_with_many_to_many(self): @@ -1233,8 +1342,13 @@ def test_import_object_with_many_to_many(self): self.assertEqual(set(ad_holder.ads.all()), {advert_2, advert_3}) # advert is listed in WAGTAILTRANSFER_UPDATE_RELATED_MODELS, so changes to the advert should have been pulled in too - self.assertEqual(advert_3.slogan, "Buy a half-scale authentically hydrogen-filled replica of the Hindenburg!") - self.assertEqual(advert_3.run_until, datetime(1937, 5, 6, 23, 25, 12, tzinfo=timezone.utc)) + self.assertEqual( + advert_3.slogan, + "Buy a half-scale authentically hydrogen-filled replica of the Hindenburg!", + ) + self.assertEqual( + 
advert_3.run_until, datetime(1937, 5, 6, 23, 25, 12, tzinfo=timezone.utc) + ) self.assertEqual(advert_3.run_from, None) def test_import_with_field_based_lookup(self): @@ -1298,11 +1412,13 @@ def test_import_with_field_based_lookup(self): importer.add_json(data) importer.run() - updated_page = SponsoredPage.objects.get(url_path='/home/oil-is-still-great/') + updated_page = SponsoredPage.objects.get(url_path="/home/oil-is-still-great/") # The 'Cars' category should have been matched by name to the existing record - self.assertEqual(updated_page.categories.get(name='Cars').colour, "red") + self.assertEqual(updated_page.categories.get(name="Cars").colour, "red") # The 'Environment' category should have been created - self.assertEqual(updated_page.categories.get(name='Environment').colour, "green") + self.assertEqual( + updated_page.categories.get(name="Environment").colour, "green" + ) def test_skip_import_if_hard_dependency_on_non_imported_page(self): data = """{ @@ -1426,22 +1542,36 @@ def test_skip_import_if_hard_dependency_on_non_imported_page(self): importer.run() # A non-nullable FK to an existing page outside the imported root is fine - redirect_to_oil_page = RedirectPage.objects.get(slug='redirect-to-oil-page') - self.assertEqual(redirect_to_oil_page.redirect_to.slug, 'oil-is-great') + redirect_to_oil_page = RedirectPage.objects.get(slug="redirect-to-oil-page") + self.assertEqual(redirect_to_oil_page.redirect_to.slug, "oil-is-great") # A non-nullable FK to a non-existing page outside the imported root will prevent import - self.assertFalse(RedirectPage.objects.filter(slug='redirect-to-unimported-page').exists()) + self.assertFalse( + RedirectPage.objects.filter(slug="redirect-to-unimported-page").exists() + ) # We can also handle FKs to pages being created in the import - redirect_to_redirect_to_oil_page = RedirectPage.objects.get(slug='redirect-to-redirect-to-oil-page') - self.assertEqual(redirect_to_redirect_to_oil_page.redirect_to.slug, 
'redirect-to-oil-page') + redirect_to_redirect_to_oil_page = RedirectPage.objects.get( + slug="redirect-to-redirect-to-oil-page" + ) + self.assertEqual( + redirect_to_redirect_to_oil_page.redirect_to.slug, "redirect-to-oil-page" + ) # Failure to create a page will also propagate to pages with a hard dependency on it - self.assertFalse(RedirectPage.objects.filter(slug='redirect-to-redirect-to-unimported-page').exists()) + self.assertFalse( + RedirectPage.objects.filter( + slug="redirect-to-redirect-to-unimported-page" + ).exists() + ) # Circular references will be caught and pages not created - self.assertFalse(RedirectPage.objects.filter(slug='pork-redirecting-to-lamb').exists()) - self.assertFalse(RedirectPage.objects.filter(slug='lamb-redirecting-to-pork').exists()) + self.assertFalse( + RedirectPage.objects.filter(slug="pork-redirecting-to-lamb").exists() + ) + self.assertFalse( + RedirectPage.objects.filter(slug="lamb-redirecting-to-pork").exists() + ) def test_circular_references_in_rich_text(self): data = """{ @@ -1503,14 +1633,17 @@ def test_circular_references_in_rich_text(self): importer.run() # Both pages should have been created - bill_page = PageWithRichText.objects.get(slug='bill') - ben_page = PageWithRichText.objects.get(slug='ben') + bill_page = PageWithRichText.objects.get(slug="bill") + ben_page = PageWithRichText.objects.get(slug="ben") # At least one of them (i.e. the second one to be created) should have a valid link to the other self.assertTrue( - bill_page.body == """

Have you met my friend Ben?

""" % ben_page.id - or - ben_page.body == """

Have you met my friend Bill?

""" % bill_page.id + bill_page.body + == """

Have you met my friend Ben?

""" + % ben_page.id + or ben_page.body + == """

Have you met my friend Bill?

""" + % bill_page.id ) def test_omitting_references_in_m2m_relations(self): @@ -1574,13 +1707,15 @@ def test_omitting_references_in_m2m_relations(self): importer.add_json(data) importer.run() - salad_dressing_page = PageWithRelatedPages.objects.get(slug='salad-dressing') - oil_page = Page.objects.get(slug='oil-is-great') - vinegar_page = Page.objects.get(slug='vinegar') + salad_dressing_page = PageWithRelatedPages.objects.get(slug="salad-dressing") + oil_page = Page.objects.get(slug="oil-is-great") + vinegar_page = Page.objects.get(slug="vinegar") # salad_dressing_page's related_pages should include the oil (id=30) and vinegar (id=21) # pages, but not the missing and not-to-be-imported page id=31 - self.assertEqual(set(salad_dressing_page.related_pages.all()), {oil_page, vinegar_page}) + self.assertEqual( + set(salad_dressing_page.related_pages.all()), {oil_page, vinegar_page} + ) def test_import_with_soft_dependency_on_grandchild(self): # https://github.com/wagtail/wagtail-transfer/issues/84 - @@ -1650,7 +1785,7 @@ def test_import_with_soft_dependency_on_grandchild(self): # iterates over it, it gets back a known 'worst case' ordering as defined by the page # titles. importer.operations = list(importer.operations) - importer.operations.sort(key=lambda op: op.object_data['fields']['title']) + importer.operations.sort(key=lambda op: op.object_data["fields"]["title"]) importer.run() @@ -1661,12 +1796,12 @@ def test_import_with_soft_dependency_on_grandchild(self): # link from homepage has to be broken page = PageWithRichText.objects.get(slug="level-1-page") - self.assertEqual(page.body, '

link to level 3

') + self.assertEqual(page.body, "

link to level 3

") - @mock.patch('requests.get') + @mock.patch("requests.get") def test_import_custom_file_field(self, get): get.return_value.status_code = 200 - get.return_value.content = b'my test image file contents' + get.return_value.content = b"my test image file contents" data = """{ "ids_for_import": [ @@ -1697,7 +1832,7 @@ def test_import_custom_file_field(self, get): # Check the db record and file was imported get.assert_called() avatar = Avatar.objects.get() - self.assertEqual(avatar.image.read(), b'my test image file contents') + self.assertEqual(avatar.image.read(), b"my test image file contents") def test_import_multi_table_model(self): # test that importing a model using multi table inheritance correctly imports the child model, not just the parent @@ -1730,7 +1865,10 @@ def test_import_multi_table_model(self): imported_ad = LongAdvert.objects.filter(id=4).first() self.assertIsNotNone(imported_ad) self.assertEqual(imported_ad.slogan, "test") - self.assertEqual(imported_ad.run_until, datetime(2020, 12, 23, 12, 34, 56, tzinfo=timezone.utc)) + self.assertEqual( + imported_ad.run_until, + datetime(2020, 12, 23, 12, 34, 56, tzinfo=timezone.utc), + ) self.assertEqual(imported_ad.description, "longertest") def test_import_model_with_generic_foreign_key(self): @@ -1831,11 +1969,16 @@ def test_import_model_with_deleted_reverse_related_models(self): self.assertIsNotNone(imported_ad) self.assertIsNone(imported_ad.tags.first()) - @override_settings(WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS=[('tests.advert', 'tagged_items', False)]) + @override_settings( + WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS=[ + ("tests.advert", "tagged_items", False) + ] + ) def test_import_model_with_untracked_deleted_reverse_related_models(self): # test re-importing a model where WAGTAILTRANFER_FOLLOWED_REVERSE_RELATIONS is not used to track tag deletions # will not delete tags from wagtail_transfer import field_adapters + importlib.reload(field_adapters) # force reload field adapters as 
followed/deleted variables are set on module load, so will not get new setting data = """{ diff --git a/tests/tests/test_views.py b/tests/tests/test_views.py index bfbe095..160949f 100644 --- a/tests/tests/test_views.py +++ b/tests/tests/test_views.py @@ -15,24 +15,24 @@ class TestChooseView(TestCase): - fixtures = ['test.json'] + fixtures = ["test.json"] def setUp(self): - self.client.login(username='admin', password='password') + self.client.login(username="admin", password="password") def test_get(self): - response = self.client.get('/admin/wagtail-transfer/choose/') + response = self.client.get("/admin/wagtail-transfer/choose/") self.assertEqual(response.status_code, 200) self.assertContains(response, 'data-wagtail-component="content-import-form"') -@mock.patch('requests.post') -@mock.patch('requests.get') +@mock.patch("requests.post") +@mock.patch("requests.get") class TestImportView(TestCase): - fixtures = ['test.json'] + fixtures = ["test.json"] def setUp(self): - self.client.login(username='admin', password='password') + self.client.login(username="admin", password="password") def test_run(self, get, post): get.return_value.status_code = 200 @@ -126,38 +126,51 @@ def test_run(self, get, post): ] }""" - response = self.client.post('/admin/wagtail-transfer/import/', { - 'source': 'staging', - 'source_page_id': '12', - 'dest_page_id': '2', - }) - self.assertRedirects(response, '/admin/pages/2/') + response = self.client.post( + "/admin/wagtail-transfer/import/", + { + "source": "staging", + "source_page_id": "12", + "dest_page_id": "2", + }, + ) + self.assertRedirects(response, "/admin/pages/2/") # Pages API should be called once, with 12 as the root page get.assert_called_once() args, kwargs = get.call_args - self.assertEqual(args[0], 'https://www.example.com/wagtail-transfer/api/pages/12/') - self.assertIn('digest', kwargs['params']) + self.assertEqual( + args[0], "https://www.example.com/wagtail-transfer/api/pages/12/" + ) + self.assertIn("digest", 
kwargs["params"]) # then the Objects API should be called, requesting adverts with ID 11 and 8 post.assert_called_once() args, kwargs = post.call_args - self.assertEqual(args[0], 'https://www.example.com/wagtail-transfer/api/objects/') - self.assertIn('digest', kwargs['params']) - requested_ids = json.loads(kwargs['data'])['tests.advert'] + self.assertEqual( + args[0], "https://www.example.com/wagtail-transfer/api/objects/" + ) + self.assertIn("digest", kwargs["params"]) + requested_ids = json.loads(kwargs["data"])["tests.advert"] self.assertEqual(set(requested_ids), {8, 11}) # Check import results - updated_page = SponsoredPage.objects.get(url_path='/home/oil-is-still-great/') + updated_page = SponsoredPage.objects.get(url_path="/home/oil-is-still-great/") self.assertEqual(updated_page.intro, "yay fossil fuels and climate change") self.assertEqual(updated_page.advert.slogan, "put a leopard in your tank") - self.assertEqual(updated_page.advert.run_until, datetime(2020, 12, 23, 1, 23, 45, tzinfo=timezone.utc)) + self.assertEqual( + updated_page.advert.run_until, + datetime(2020, 12, 23, 1, 23, 45, tzinfo=timezone.utc), + ) self.assertEqual(updated_page.advert.run_from, date(2020, 1, 21)) - created_page = SponsoredPage.objects.get(url_path='/home/eggs-are-great-too/') + created_page = SponsoredPage.objects.get(url_path="/home/eggs-are-great-too/") self.assertEqual(created_page.intro, "you can make cakes with them") self.assertEqual(created_page.advert.slogan, "go to work on an egg") - self.assertEqual(created_page.advert.run_until, datetime(2020, 1, 23, 1, 23, 45, tzinfo=timezone.utc)) + self.assertEqual( + created_page.advert.run_until, + datetime(2020, 1, 23, 1, 23, 45, tzinfo=timezone.utc), + ) self.assertEqual(created_page.advert.run_from, None) def test_missing_related_object(self, get, post): @@ -247,53 +260,65 @@ def test_missing_related_object(self, get, post): ] }""" - response = self.client.post('/admin/wagtail-transfer/import/', { - 'source': 'staging', - 
'source_page_id': '12', - 'dest_page_id': '2', - }) - self.assertRedirects(response, '/admin/pages/2/') + response = self.client.post( + "/admin/wagtail-transfer/import/", + { + "source": "staging", + "source_page_id": "12", + "dest_page_id": "2", + }, + ) + self.assertRedirects(response, "/admin/pages/2/") # Pages API should be called once, with 12 as the root page get.assert_called_once() args, kwargs = get.call_args - self.assertEqual(args[0], 'https://www.example.com/wagtail-transfer/api/pages/12/') - self.assertIn('digest', kwargs['params']) + self.assertEqual( + args[0], "https://www.example.com/wagtail-transfer/api/pages/12/" + ) + self.assertIn("digest", kwargs["params"]) # then the Objects API should be called, requesting adverts with ID 11 and 8 post.assert_called_once() args, kwargs = post.call_args - self.assertEqual(args[0], 'https://www.example.com/wagtail-transfer/api/objects/') - self.assertIn('digest', kwargs['params']) - requested_ids = json.loads(kwargs['data'])['tests.advert'] + self.assertEqual( + args[0], "https://www.example.com/wagtail-transfer/api/objects/" + ) + self.assertIn("digest", kwargs["params"]) + requested_ids = json.loads(kwargs["data"])["tests.advert"] self.assertEqual(set(requested_ids), {8, 11}) # Check import results - updated_page = SponsoredPage.objects.get(url_path='/home/oil-is-still-great/') + updated_page = SponsoredPage.objects.get(url_path="/home/oil-is-still-great/") self.assertEqual(updated_page.intro, "yay fossil fuels and climate change") self.assertEqual(updated_page.advert.slogan, "put a leopard in your tank") - self.assertEqual(updated_page.advert.run_until, datetime(2020, 12, 23, 1, 23, 45, tzinfo=timezone.utc)) + self.assertEqual( + updated_page.advert.run_until, + datetime(2020, 12, 23, 1, 23, 45, tzinfo=timezone.utc), + ) self.assertEqual(updated_page.advert.run_from, None) # The egg advert was missing in the object-api response, and the FK on SponsoredPage is # nullable, so it should create the egg page 
without the advert - created_page = SponsoredPage.objects.get(url_path='/home/eggs-are-great-too/') + created_page = SponsoredPage.objects.get(url_path="/home/eggs-are-great-too/") self.assertEqual(created_page.intro, "you can make cakes with them") self.assertEqual(created_page.advert, None) def test_list_snippet_models(self, get, post): # Test the model chooser view. get_params = "models=True" - digest = digest_for_source('local', get_params) - response = self.client.get(f"https://www.example.com/wagtail-transfer/api/chooser/models/?{get_params}&digest={digest}") + digest = digest_for_source("local", get_params) + response = self.client.get( + f"https://www.example.com/wagtail-transfer/api/chooser/models/?{get_params}&digest={digest}" + ) self.assertEqual(response.status_code, 200) content = json.loads(response.content.decode("utf-8")) - self.assertEqual(content['meta']['total_count'], 1) + self.assertEqual(content["meta"]["total_count"], 1) - snippet = content['items'][0] - self.assertEqual(snippet['model_label'], 'tests.category') - self.assertEqual(snippet['name'], 'Category') + snippet = content["items"][0] + self.assertEqual(snippet["model_label"], "tests.category") + self.assertEqual(snippet["name"], "Category") class ImportPermissionsTests(TestCase): @@ -302,11 +327,13 @@ class ImportPermissionsTests(TestCase): def setUp(self): idmapping_content_type = ContentType.objects.get_for_model(IDMapping) can_import_permission = Permission.objects.get( - content_type=idmapping_content_type, codename="wagtailtransfer_can_import", + content_type=idmapping_content_type, + codename="wagtailtransfer_can_import", ) can_access_admin_permission = Permission.objects.get( content_type=ContentType.objects.get( - app_label="wagtailadmin", model="admin", + app_label="wagtailadmin", + model="admin", ), codename="access_admin", ) @@ -318,7 +345,9 @@ def setUp(self): editors = Group.objects.get(name="Editors") self.superuser = User.objects.create_superuser( - username="superuser", 
email="superuser@example.com", password="password", + username="superuser", + email="superuser@example.com", + password="password", ) self.inactive_superuser = User.objects.create_superuser( username="inactivesuperuser", @@ -379,7 +408,6 @@ def setUp(self): ] def _test_view(self, method, url, data=None, success_url=None): - for user in self.permitted_users: with self.subTest(user=user): self.client.login(username=user.username, password="password") diff --git a/tests/urls.py b/tests/urls.py index de4c2a4..49cc456 100644 --- a/tests/urls.py +++ b/tests/urls.py @@ -1,4 +1,3 @@ - from django.urls import include, re_path from wagtail import urls as wagtail_urls from wagtail.admin import urls as wagtailadmin_urls @@ -7,10 +6,9 @@ urlpatterns = [ - re_path(r'^admin/', include(wagtailadmin_urls)), - re_path(r'^wagtail-transfer/', include(wagtailtransfer_urls)), - + re_path(r"^admin/", include(wagtailadmin_urls)), + re_path(r"^wagtail-transfer/", include(wagtailtransfer_urls)), # For anything not caught by a more specific rule above, hand over to # Wagtail's serving mechanism - re_path(r'', include(wagtail_urls)), + re_path(r"", include(wagtail_urls)), ] diff --git a/wagtail_transfer/admin_urls.py b/wagtail_transfer/admin_urls.py index ffe99fb..f403652 100644 --- a/wagtail_transfer/admin_urls.py +++ b/wagtail_transfer/admin_urls.py @@ -5,14 +5,21 @@ from .vendor.wagtail_api_v2.router import WagtailAPIRouter -chooser_api = WagtailAPIRouter('wagtail_transfer_admin:page_chooser_api') -chooser_api.register_endpoint('pages', views.PageChooserAPIViewSet) +chooser_api = WagtailAPIRouter("wagtail_transfer_admin:page_chooser_api") +chooser_api.register_endpoint("pages", views.PageChooserAPIViewSet) -app_name = 'wagtail_transfer_admin' +app_name = "wagtail_transfer_admin" urlpatterns = [ - re_path(r'^choose/$', views.choose_page, name='choose_page'), - re_path(r'^import/$', views.do_import, name='import'), - re_path(r'^api/chooser-local/', (chooser_api.urls[0], 'page_chooser_api', 
'page_chooser_api')), - re_path(r'^api/chooser-proxy/(\w+)/([\w\-/]*)$', views.chooser_api_proxy, name='chooser_api_proxy'), - re_path(r'^api/check_uid/$', views.check_page_existence_for_uid, name='check_uid'), + re_path(r"^choose/$", views.choose_page, name="choose_page"), + re_path(r"^import/$", views.do_import, name="import"), + re_path( + r"^api/chooser-local/", + (chooser_api.urls[0], "page_chooser_api", "page_chooser_api"), + ), + re_path( + r"^api/chooser-proxy/(\w+)/([\w\-/]*)$", + views.chooser_api_proxy, + name="chooser_api_proxy", + ), + re_path(r"^api/check_uid/$", views.check_page_existence_for_uid, name="check_uid"), ] diff --git a/wagtail_transfer/apps.py b/wagtail_transfer/apps.py index 4f3a452..46320c6 100644 --- a/wagtail_transfer/apps.py +++ b/wagtail_transfer/apps.py @@ -2,5 +2,5 @@ class WagtailTransferAppConfig(AppConfig): - name = 'wagtail_transfer' - default_auto_field = 'django.db.models.AutoField' + name = "wagtail_transfer" + default_auto_field = "django.db.models.AutoField" diff --git a/wagtail_transfer/auth.py b/wagtail_transfer/auth.py index d022842..595766a 100644 --- a/wagtail_transfer/auth.py +++ b/wagtail_transfer/auth.py @@ -6,19 +6,23 @@ from django.core.exceptions import PermissionDenied -GROUP_QUERY_WITH_DIGEST = re.compile('(?P.*?)&?digest=(?P[^&]*)(?P.*)') +GROUP_QUERY_WITH_DIGEST = re.compile( + "(?P.*?)&?digest=(?P[^&]*)(?P.*)" +) + def check_get_digest_wrapper(view_func): """ Check the digest of a request matches its GET parameters This is useful when wrapping vendored API views """ + def decorated_view(request, *args, **kwargs): - query_string = request.META.get('QUERY_STRING', '') + query_string = request.META.get("QUERY_STRING", "") match = GROUP_QUERY_WITH_DIGEST.match(query_string) if not match: raise PermissionDenied - digest = match.group('digest') + digest = match.group("digest") message = f'{match.group("query_before")}{match.group("query_after")}' if not (digest and message): # This decorator is intended for use 
with GET parameters @@ -28,16 +32,17 @@ def decorated_view(request, *args, **kwargs): # Unfortunately the admin API views won't allow unknown GET parameters # So we must remove the digest parameter from the request as well - request.META['QUERY_STRING'] = message + request.META["QUERY_STRING"] = message # Normally request.GET shouldn't be evaluated yet, but in case someone's # inspecting it in middleware for example, let's remove the cached version, # otherwise it will retain the old digest parameter - if hasattr(request, 'GET'): + if hasattr(request, "GET"): del request.GET response = view_func(request, *args, **kwargs) return response + return decorated_view @@ -57,7 +62,7 @@ def check_digest(message, digest): def digest_for_source(source, message): - key = settings.WAGTAILTRANSFER_SOURCES[source]['SECRET_KEY'] + key = settings.WAGTAILTRANSFER_SOURCES[source]["SECRET_KEY"] # Key and message must be bytes objects if isinstance(key, str): diff --git a/wagtail_transfer/field_adapters.py b/wagtail_transfer/field_adapters.py index 7c2d7af..0f63686 100644 --- a/wagtail_transfer/field_adapters.py +++ b/wagtail_transfer/field_adapters.py @@ -23,16 +23,22 @@ from .streamfield import get_object_references, update_object_ids -WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS = getattr(settings, "WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS", [('wagtailimages.image', 'tagged_items', True)]) +WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS = getattr( + settings, + "WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS", + [("wagtailimages.image", "tagged_items", True)], +) FOLLOWED_REVERSE_RELATIONS = { - (model_label.lower(), relation.lower()) for model_label, relation, _ in WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS + (model_label.lower(), relation.lower()) + for model_label, relation, _ in WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS } DELETED_REVERSE_RELATIONS = { - (model_label.lower(), relation.lower()) for model_label, relation, track_deletions in WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS if 
track_deletions + (model_label.lower(), relation.lower()) + for model_label, relation, track_deletions in WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS + if track_deletions } ADMIN_BASE_URL = getattr( - settings, "WAGTAILADMIN_BASE_URL", - getattr(settings, "BASE_URL", None) + settings, "WAGTAILADMIN_BASE_URL", getattr(settings, "BASE_URL", None) ) @@ -114,7 +120,6 @@ def get_objects_to_serialize(self, instance): return set() - class ForeignKeyAdapter(FieldAdapter): def __init__(self, field): super().__init__(field) @@ -186,10 +191,18 @@ def update_object_references(self, value, destination_ids_by_source): def populate_field(self, instance, value, context): model_id, content_type = None, None if value: - model_path, model_id = self.update_object_references(value, context.destination_ids_by_source) - content_type = ContentType.objects.get_by_natural_key(*model_path.split('.')) - - setattr(instance, instance._meta.get_field(self.field.ct_field).get_attname(), content_type.pk) + model_path, model_id = self.update_object_references( + value, context.destination_ids_by_source + ) + content_type = ContentType.objects.get_by_natural_key( + *model_path.split(".") + ) + + setattr( + instance, + instance._meta.get_field(self.field.ct_field).get_attname(), + content_type.pk, + ) setattr(instance, self.field.fk_field, model_id) def get_managed_fields(self): @@ -199,29 +212,42 @@ def get_managed_fields(self): class ManyToOneRelAdapter(FieldAdapter): def __init__(self, field): super().__init__(field) - self.related_field = getattr(field, 'field', None) or getattr(field, 'remote_field', None) + self.related_field = getattr(field, "field", None) or getattr( + field, "remote_field", None + ) self.related_base_model = get_base_model(field.related_model) self.is_parental = isinstance(self.related_field, ParentalKey) - self.is_followed = (get_base_model(self.field.model)._meta.label_lower, self.name) in FOLLOWED_REVERSE_RELATIONS + self.is_followed = ( + 
get_base_model(self.field.model)._meta.label_lower, + self.name, + ) in FOLLOWED_REVERSE_RELATIONS def _get_related_objects(self, instance): return getattr(instance, self.name).all() def serialize(self, instance): if self.is_parental or self.is_followed: - return list(self._get_related_objects(instance).values_list('pk', flat=True)) + return list( + self._get_related_objects(instance).values_list("pk", flat=True) + ) def get_object_references(self, instance): refs = set() if self.is_parental or self.is_followed: - for pk in self._get_related_objects(instance).values_list('pk', flat=True): + for pk in self._get_related_objects(instance).values_list("pk", flat=True): refs.add((self.related_base_model, pk)) return refs def get_object_deletions(self, instance, value, context): - if (self.is_parental or (get_base_model(self.field.model)._meta.label_lower, self.name) in DELETED_REVERSE_RELATIONS): + if ( + self.is_parental + or (get_base_model(self.field.model)._meta.label_lower, self.name) + in DELETED_REVERSE_RELATIONS + ): value = value or [] - uids = {context.uids_by_source[(self.related_base_model, pk)] for pk in value} + uids = { + context.uids_by_source[(self.related_base_model, pk)] for pk in value + } # delete any related objects on the existing object if they can't be mapped back # to one of the uids in the new set locator = get_locator_for_model(self.related_base_model) @@ -231,7 +257,11 @@ def get_object_deletions(self, instance, value, context): if child is not None: matched_destination_ids.add(child.pk) - return {child for child in self._get_related_objects(instance) if child.pk not in matched_destination_ids} + return { + child + for child in self._get_related_objects(instance) + if child.pk not in matched_destination_ids + } return set() def get_objects_to_serialize(self, instance): @@ -245,7 +275,9 @@ def populate_field(self, instance, value, context): class RichTextAdapter(FieldAdapter): def get_object_references(self, instance): - return 
get_reference_handler().get_objects(self.field.value_from_object(instance)) + return get_reference_handler().get_objects( + self.field.value_from_object(instance) + ) def get_dependencies(self, value): return { @@ -264,7 +296,9 @@ def __init__(self, field): def get_object_references(self, instance): # get the list of dicts representation of the streamfield json - stream = self.stream_block.get_prep_value(self.field.value_from_object(instance)) + stream = self.stream_block.get_prep_value( + self.field.value_from_object(instance) + ) return get_object_references(self.stream_block, stream) def get_dependencies(self, value): @@ -274,7 +308,11 @@ def get_dependencies(self, value): } def update_object_references(self, value, destination_ids_by_source): - return json.dumps(update_object_ids(self.stream_block, json.loads(value), destination_ids_by_source)) + return json.dumps( + update_object_ids( + self.stream_block, json.loads(value), destination_ids_by_source + ) + ) class FileAdapter(FieldAdapter): @@ -283,39 +321,40 @@ def serialize(self, instance): if not value: return None url = value.url - if url.startswith('/'): + if url.startswith("/"): # Using a relative media url. ie. 
/media/ # Prepend the BASE_URL to turn this into an absolute URL if ADMIN_BASE_URL is None: raise ImproperlyConfigured( "A WAGTAILADMIN_BASE_URL or BASE_URL setting must be provided when importing files" ) - url = ADMIN_BASE_URL.rstrip('/') + url + url = ADMIN_BASE_URL.rstrip("/") + url return { - 'download_url': url, - 'size': get_file_size(self.field, instance), - 'hash': get_file_hash(self.field, instance), + "download_url": url, + "size": get_file_size(self.field, instance), + "hash": get_file_hash(self.field, instance), } def populate_field(self, instance, value, context): if not value: return None - imported_file = context.imported_files_by_source_url.get(value['download_url']) + imported_file = context.imported_files_by_source_url.get(value["download_url"]) if imported_file is None: - existing_file = self.field.value_from_object(instance) if existing_file: existing_file_hash = get_file_hash(self.field, instance) - if existing_file_hash == value['hash']: + if existing_file_hash == value["hash"]: # File not changed, so don't bother updating it return # Get the local filename - name = pathlib.PurePosixPath(urlparse(value['download_url']).path).name + name = pathlib.PurePosixPath(urlparse(value["download_url"]).path).name local_filename = self.field.generate_filename(instance, name) - _file = File(local_filename, value['size'], value['hash'], value['download_url']) + _file = File( + local_filename, value["size"], value["hash"], value["download_url"] + ) try: imported_file = _file.transfer() except FileTransferError: @@ -383,7 +422,7 @@ def __init__(self): def _scan_for_adapters(self): adapters = dict(self.BASE_ADAPTERS_BY_FIELD_CLASS) - for fn in hooks.get_hooks('register_field_adapters'): + for fn in hooks.get_hooks("register_field_adapters"): adapters.update(fn()) self.adapters_by_field_class = adapters diff --git a/wagtail_transfer/files.py b/wagtail_transfer/files.py index 539beae..4200de3 100644 --- a/wagtail_transfer/files.py +++ 
b/wagtail_transfer/files.py @@ -27,12 +27,12 @@ def open_file(field, file): is_local = False if is_local: - f.open('rb') + f.open("rb") else: # Some external storage backends don't allow reopening # the file. Get a fresh file instance. #1397 storage = field.storage - f = storage.open(f.name, 'rb') + f = storage.open(f.name, "rb") close_file = True @@ -53,7 +53,8 @@ def get_file_size(field, instance): # Cases we know about from wagtail.documents.models import AbstractDocument from wagtail.images.models import AbstractImage - if isinstance(instance, (AbstractDocument, AbstractImage)) and field.name == 'file': + + if isinstance(instance, (AbstractDocument, AbstractImage)) and field.name == "file": return instance.get_file_size() # Allow developers to provide a file size getter for custom file fields @@ -74,7 +75,8 @@ def get_file_hash(field, instance): # Cases we know about from wagtail.documents.models import AbstractDocument from wagtail.images.models import AbstractImage - if isinstance(instance, (AbstractDocument, AbstractImage)) and field.name == 'file': + + if isinstance(instance, (AbstractDocument, AbstractImage)) and field.name == "file": return instance.get_file_hash() # Allow developers to provide a file hash getter for custom file fields @@ -98,6 +100,7 @@ class File: Note that local_filename is only a guideline, it may be changed to avoid conflict with an existing file """ + def __init__(self, local_filename, size, hash, source_url): self.local_filename = local_filename self.size = size diff --git a/wagtail_transfer/locators.py b/wagtail_transfer/locators.py index 8468f81..7e4119d 100644 --- a/wagtail_transfer/locators.py +++ b/wagtail_transfer/locators.py @@ -21,11 +21,13 @@ # dict of models that should be located by field values using FieldLocator, # rather than by UUID mapping LOOKUP_FIELDS = { - 'taggit.tag': ['slug'], # sensible default for taggit; can still be overridden - 'wagtailcore.locale': ["language_code"], - 'contenttypes.contenttype': 
['app_label', 'model'], + "taggit.tag": ["slug"], # sensible default for taggit; can still be overridden + "wagtailcore.locale": ["language_code"], + "contenttypes.contenttype": ["app_label", "model"], } -for model_label, fields in getattr(settings, 'WAGTAILTRANSFER_LOOKUP_FIELDS', {}).items(): +for model_label, fields in getattr( + settings, "WAGTAILTRANSFER_LOOKUP_FIELDS", {} +).items(): LOOKUP_FIELDS[model_label.lower()] = fields @@ -61,7 +63,7 @@ def get_uid_for_local_id(self, id, create=True): id_mapping, created = IDMapping.objects.get_or_create( content_type=self.content_type, local_id=id, - defaults={'uid': uuid.uuid1(clock_seq=UUID_SEQUENCE)} + defaults={"uid": uuid.uuid1(clock_seq=UUID_SEQUENCE)}, ) UUID_SEQUENCE += 1 @@ -70,8 +72,7 @@ def get_uid_for_local_id(self, id, create=True): """Get UID for the instance with the given ID (returning None if one doesn't exist)""" try: id_mapping = IDMapping.objects.get( - content_type=self.content_type, - local_id=id + content_type=self.content_type, local_id=id ) return id_mapping.uid except IDMapping.DoesNotExist: @@ -90,7 +91,8 @@ def attach_uid(self, instance, uid): # use update_or_create to account for the possibility of an existing IDMapping for the same # UID, left over from the object being previously imported and then deleted IDMapping.objects.update_or_create( - uid=uid, defaults={'content_type': self.content_type, 'local_id': instance.pk} + uid=uid, + defaults={"content_type": self.content_type, "local_id": instance.pk}, ) def uid_from_json(self, json_uid): diff --git a/wagtail_transfer/management/commands/preseed_transfer_table.py b/wagtail_transfer/management/commands/preseed_transfer_table.py index 27be2f4..b1d0e13 100644 --- a/wagtail_transfer/management/commands/preseed_transfer_table.py +++ b/wagtail_transfer/management/commands/preseed_transfer_table.py @@ -10,21 +10,28 @@ # Namespace UUID common to all wagtail-transfer installances, used with uuid5 to generate # a predictable UUID for any given 
model-name / PK combination -NAMESPACE = uuid.UUID('418b5168-5a10-11ea-a84b-7831c1c42e66') +NAMESPACE = uuid.UUID("418b5168-5a10-11ea-a84b-7831c1c42e66") class Command(BaseCommand): help = "Pre-seed ID mappings used for content transfer" def add_arguments(self, parser): - parser.add_argument('labels', metavar='model_or_app', nargs='+', help="Model (as app_label.model_name) or app name to populate table entries for, e.g. wagtailcore.Page or wagtailcore") - parser.add_argument('--range', help="Range of IDs to create mappings for (e.g. 1-1000)") + parser.add_argument( + "labels", + metavar="model_or_app", + nargs="+", + help="Model (as app_label.model_name) or app name to populate table entries for, e.g. wagtailcore.Page or wagtailcore", + ) + parser.add_argument( + "--range", help="Range of IDs to create mappings for (e.g. 1-1000)" + ) def handle(self, *args, **options): models = [] - for label in options['labels']: + for label in options["labels"]: label = label.lower() - if '.' in label: + if "." in label: # interpret as a model try: model = get_model_for_path(label) @@ -33,7 +40,8 @@ def handle(self, *args, **options): if model != get_base_model(model): raise CommandError( - "%r is not a valid model for ID mappings, as it is a subclass using multi-table inheritance." % label + "%r is not a valid model for ID mappings, as it is a subclass using multi-table inheritance." 
+ % label ) models.append(model) @@ -55,27 +63,32 @@ def handle(self, *args, **options): content_type = ContentType.objects.get_for_model(model) # find IDs of instances of this model that already exist in the IDMapping table - mapped_ids = IDMapping.objects.filter(content_type=content_type).values_list('local_id', flat=True) + mapped_ids = IDMapping.objects.filter( + content_type=content_type + ).values_list("local_id", flat=True) # these will be returned as strings, so convert to the pk field's native type mapped_ids = [model._meta.pk.to_python(id) for id in mapped_ids] # find IDs of instances not in this set - unmapped_ids = model.objects.exclude(pk__in=mapped_ids).values_list('pk', flat=True) + unmapped_ids = model.objects.exclude(pk__in=mapped_ids).values_list( + "pk", flat=True + ) # apply ID range filter if passed - if options['range']: - min_id, max_id = options['range'].split('-') + if options["range"]: + min_id, max_id = options["range"].split("-") unmapped_ids = unmapped_ids.filter(pk__gte=min_id, pk__lte=max_id) # create ID mapping for each of these for pk in unmapped_ids: _, created = IDMapping.objects.get_or_create( - content_type=content_type, local_id=pk, - defaults={'uid': uuid.uuid5(NAMESPACE, f"{model_name}:{pk}")} + content_type=content_type, + local_id=pk, + defaults={"uid": uuid.uuid5(NAMESPACE, f"{model_name}:{pk}")}, ) if created: created_count += 1 - if options['verbosity'] >= 1: + if options["verbosity"] >= 1: self.stdout.write("%d ID mappings created." 
% created_count) diff --git a/wagtail_transfer/models.py b/wagtail_transfer/models.py index 455258c..9ca2b09 100644 --- a/wagtail_transfer/models.py +++ b/wagtail_transfer/models.py @@ -7,10 +7,10 @@ class IDMapping(models.Model): uid = models.UUIDField(primary_key=True) content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) local_id = models.CharField(max_length=255) - content_object = GenericForeignKey('content_type', 'local_id') + content_object = GenericForeignKey("content_type", "local_id") class Meta: - unique_together = ['content_type', 'local_id'] + unique_together = ["content_type", "local_id"] class ImportedFile(models.Model): @@ -35,7 +35,7 @@ def get_model_for_path(model_path): """ Given an 'app_name.model_name' string, return the model class """ - app_label, model_name = model_path.split('.') + app_label, model_name = model_path.split(".") return ContentType.objects.get_by_natural_key(app_label, model_name).model_class() diff --git a/wagtail_transfer/operations.py b/wagtail_transfer/operations.py index 72b6e41..3799f03 100644 --- a/wagtail_transfer/operations.py +++ b/wagtail_transfer/operations.py @@ -16,20 +16,24 @@ # Models which should be updated to their latest version when encountered in object references -default_update_related_models = ['wagtailimages.image'] +default_update_related_models = ["wagtailimages.image"] UPDATE_RELATED_MODELS = [ model_label.lower() - for model_label in getattr(settings, 'WAGTAILTRANSFER_UPDATE_RELATED_MODELS', default_update_related_models) + for model_label in getattr( + settings, "WAGTAILTRANSFER_UPDATE_RELATED_MODELS", default_update_related_models + ) ] # Models which should NOT be created in response to being encountered in object references -default_no_follow_models = ['wagtailcore.page', 'contenttypes.contenttype'] +default_no_follow_models = ["wagtailcore.page", "contenttypes.contenttype"] NO_FOLLOW_MODELS = [ model_label.lower() - for model_label in getattr(settings, 
'WAGTAILTRANSFER_NO_FOLLOW_MODELS', default_no_follow_models) + for model_label in getattr( + settings, "WAGTAILTRANSFER_NO_FOLLOW_MODELS", default_no_follow_models + ) ] @@ -61,7 +65,9 @@ def _find_at_destination(self): """ # see if there's already an entry in destination_ids_by_source try: - self._destination_id = self.context.destination_ids_by_source[(self.model, self.source_id)] + self._destination_id = self.context.destination_ids_by_source[ + (self.model, self.source_id) + ] self._exists_at_destination = True return except KeyError: @@ -78,7 +84,9 @@ def _find_at_destination(self): else: self._destination_id = destination_object.pk self._exists_at_destination = True - self.context.destination_ids_by_source[(self.model, self.source_id)] = self._destination_id + self.context.destination_ids_by_source[ + (self.model, self.source_id) + ] = self._destination_id @property def exists_at_destination(self): @@ -88,10 +96,11 @@ def exists_at_destination(self): return self._exists_at_destination def __eq__(self, other): - return ( - isinstance(other, Objective) - and (self.model, self.source_id, self.must_update) == (other.model, other.source_id, other.must_update) - ) + return isinstance(other, Objective) and ( + self.model, + self.source_id, + self.must_update, + ) == (other.model, other.source_id, other.must_update) def __hash__(self): return hash((self.model, self.source_id, self.must_update)) @@ -104,6 +113,7 @@ class ImportContext: (for example, once a page is created at the destination, we add its ID mapping so that we can handle references to it that appear in other imported pages). """ + def __init__(self): # A mapping of objects on the source site to their IDs on the destination site. # Keys are tuples of (model_class, source_id); values are destination IDs. 
@@ -120,17 +130,18 @@ def __init__(self): class ImportPlanner: - def __init__(self, root_page_source_pk=None, destination_parent_id=None, model=None): - + def __init__( + self, root_page_source_pk=None, destination_parent_id=None, model=None + ): if root_page_source_pk or destination_parent_id: - self.import_type = 'page' + self.import_type = "page" self.root_page_source_pk = int(root_page_source_pk) if destination_parent_id is None: self.destination_parent_id = None else: self.destination_parent_id = int(destination_parent_id) elif model: - self.import_type = 'model' + self.import_type = "model" self.model = model else: raise NotImplementedError("Missing page kwargs or specified model kwarg") @@ -214,13 +225,13 @@ def add_json(self, json_data): # for each ID in the import list, add to base_import_ids as an object explicitly selected # for import - for model_path, source_id in data['ids_for_import']: + for model_path, source_id in data["ids_for_import"]: model = get_base_model_for_path(model_path) self.base_import_ids.add((model, source_id)) - # add source id -> uid mappings to the uids_by_source dict, and add objectives + # add source id -> uid mappings to the uids_by_source dict, and add objectives # for importing referenced models - for model_path, source_id, jsonish_uid in data['mappings']: + for model_path, source_id, jsonish_uid in data["mappings"]: model = get_base_model_for_path(model_path) uid = get_locator_for_model(model).uid_from_json(jsonish_uid) self.context.uids_by_source[(model, source_id)] = uid @@ -229,21 +240,22 @@ def add_json(self, json_data): if base_import or model_path not in NO_FOLLOW_MODELS: objective = Objective( - model, source_id, self.context, - must_update=(base_import or model_path in UPDATE_RELATED_MODELS) + model, + source_id, + self.context, + must_update=(base_import or model_path in UPDATE_RELATED_MODELS), ) # add to the set of objectives that need handling self._add_objective(objective) # add object data to the 
object_data_by_source dict - for obj_data in data['objects']: + for obj_data in data["objects"]: self._add_object_data_to_lookup(obj_data) # retry tasks that were previously postponed due to missing object data self._retry_tasks() - # Process all unhandled objectives - which may trigger new objectives as dependencies of # the resulting operations - until no unhandled objectives remain while self.unhandled_objectives: @@ -251,8 +263,8 @@ def add_json(self, json_data): self._handle_objective(objective) def _add_object_data_to_lookup(self, obj_data): - model = get_base_model_for_path(obj_data['model']) - source_id = obj_data['pk'] + model = get_base_model_for_path(obj_data["model"]) + source_id = obj_data["pk"] self.object_data_by_source[(model, source_id)] = obj_data def _add_objective(self, objective): @@ -276,14 +288,12 @@ def _add_objective(self, objective): no_update_objective.must_update = False self.objectives.discard(no_update_objective) self.unhandled_objectives.discard(no_update_objective) - + self.objectives.add(objective) self.unhandled_objectives.add(objective) - def _handle_objective(self, objective): if not objective.exists_at_destination: - # object does not exist locally - create it if we're allowed to do so, i.e. 
# it is in the set of objects explicitly selected for import, or it is a related object # that we have not been blocked from following by NO_FOLLOW_MODELS @@ -294,7 +304,7 @@ def _handle_objective(self, objective): # NO_FOLLOW_MODELS prevents us from creating this object self.failed_creations.add((objective.model, objective.source_id)) else: - task = ('create', objective.model, objective.source_id) + task = ("create", objective.model, objective.source_id) self._handle_task(task) else: @@ -303,7 +313,7 @@ def _handle_objective(self, objective): self.resolutions[(objective.model, objective.source_id)] = None if objective.must_update: - task = ('update', objective.model, objective.source_id) + task = ("update", objective.model, objective.source_id) self._handle_task(task) def _handle_task(self, task): @@ -339,7 +349,7 @@ def _handle_task(self, task): if (model, source_id) in self.really_missing_object_data: # object data apparently doesn't exist on the source site either, so give up on # this object entirely - if action == 'create': + if action == "create": self.failed_creations.add((model, source_id)) else: @@ -350,34 +360,45 @@ def _handle_task(self, task): return # retrieve the specific model for this object - specific_model = get_model_for_path(object_data['model']) + specific_model = get_model_for_path(object_data["model"]) if issubclass(specific_model, MP_Node): - if object_data['parent_id'] is None: + if object_data["parent_id"] is None: # This is the root node; populate destination_ids_by_source so that we use the # existing root node for any references to it, rather than creating a new one destination_id = specific_model.get_first_root_node().pk - self.context.destination_ids_by_source[(model, source_id)] = destination_id + self.context.destination_ids_by_source[ + (model, source_id) + ] = destination_id # No operation to be performed for this task operation = None - elif action == 'create': - if issubclass(specific_model, Page) and source_id == 
self.root_page_source_pk: + elif action == "create": + if ( + issubclass(specific_model, Page) + and source_id == self.root_page_source_pk + ): # this is the root page of the import; ignore the parent ID in the source # record and import at the requested destination instead - operation = CreateTreeModel(specific_model, object_data, self.destination_parent_id) + operation = CreateTreeModel( + specific_model, object_data, self.destination_parent_id + ) else: operation = CreateTreeModel(specific_model, object_data) else: # action == 'update' - destination_id = self.context.destination_ids_by_source[(model, source_id)] + destination_id = self.context.destination_ids_by_source[ + (model, source_id) + ] obj = specific_model.objects.get(pk=destination_id) operation = UpdateModel(obj, object_data) else: # non-tree model - if action == 'create': + if action == "create": operation = CreateModel(specific_model, object_data) else: # action == 'update' - destination_id = self.context.destination_ids_by_source[(model, source_id)] + destination_id = self.context.destination_ids_by_source[ + (model, source_id) + ] obj = specific_model.objects.get(pk=destination_id) operation = UpdateModel(obj, object_data) @@ -387,20 +408,24 @@ def _handle_task(self, task): for rel in get_all_child_relations(specific_model): related_base_model = get_base_model(rel.related_model) - for child_obj_pk in object_data['fields'][rel.name]: - + for child_obj_pk in object_data["fields"][rel.name]: # Add an objective for handling the child object. 
Regardless of whether # this is a 'create' or 'update' task, we want the child objects to be at # their most up-to-date versions, so set the objective to 'must update' self._add_objective( - Objective(related_base_model, child_obj_pk, self.context, must_update=True) + Objective( + related_base_model, + child_obj_pk, + self.context, + must_update=True, + ) ) if operation is not None: self.operations.add(operation) - if action == 'create': + if action == "create": # For 'create' actions, record this operation in `resolutions`, so that any operations # that identify this object as a dependency know that this operation has to happen # first. @@ -420,7 +445,12 @@ def _handle_task(self, task): if operation is not None: for model, source_id, is_hard_dep in operation.dependencies: self._add_objective( - Objective(model, source_id, self.context, must_update=(model._meta.label_lower in UPDATE_RELATED_MODELS)) + Objective( + model, + source_id, + self.context, + must_update=(model._meta.label_lower in UPDATE_RELATED_MODELS), + ) ) for instance in operation.deletions(self.context): @@ -447,13 +477,14 @@ def _retry_tasks(self): def run(self): if self.unhandled_objectives or self.postponed_tasks: - raise ImproperlyConfigured("Cannot run import until all dependencies are resoved") + raise ImproperlyConfigured( + "Cannot run import until all dependencies are resoved" + ) # filter out unsatisfiable operations statuses = {} satisfiable_operations = [ - op for op in self.operations - if self._check_satisfiable(op, statuses) + op for op in self.operations if self._check_satisfiable(op, statuses) ] # arrange operations into an order that satisfies dependencies @@ -465,14 +496,13 @@ def run(self): with transaction.atomic(): for operation in operation_order: operation.run(self.context) - + # pages must only have revisions saved after all child objects have been updated, imported, or deleted, otherwise # they will capture outdated versions of child objects in the revision for operation in 
operation_order: if isinstance(operation.instance, Page): operation.instance.save_revision() - def _check_satisfiable(self, operation, statuses): # Check whether the given operation's dependencies are satisfiable. statuses is a dict of # previous results - keys are (model, id) pairs and the value is: @@ -480,7 +510,7 @@ def _check_satisfiable(self, operation, statuses): # False - dependency is not satisfiable # None - the satisfiability check is currently in progress - # if we encounter this we have found a circular dependency. - for (model, id, is_hard_dep) in operation.dependencies: + for model, id, is_hard_dep in operation.dependencies: if not is_hard_dep: continue # ignore soft dependencies here @@ -568,7 +598,9 @@ def _add_to_operation_order(self, operation, operation_order, path): else: try: # recursively add the operation that we're depending on here - self._add_to_operation_order(resolution, operation_order, path + [resolution]) + self._add_to_operation_order( + resolution, operation_order, path + [resolution] + ) except CircularDependencyException: if dep_is_hard: # we can't resolve the circular dependency by breaking the chain here, @@ -591,6 +623,7 @@ class Operation: necessary to retrieve more data), finding a valid sequence to run them in, and running them all within a transaction. """ + def run(self, context): raise NotImplementedError @@ -621,6 +654,7 @@ class SaveOperationMixin: Requires subclasses to define `self.model`, `self.instance` and `self.object_data`. 
""" + @cached_property def base_model(self): return get_base_model(self.model) @@ -628,7 +662,7 @@ def base_model(self): def _populate_fields(self, context): for field in self.model._meta.get_fields(): try: - value = self.object_data['fields'][field.name] + value = self.object_data["fields"][field.name] except KeyError: continue @@ -646,7 +680,7 @@ def _populate_many_to_many_fields(self, context): for field in self.model._meta.get_fields(): if isinstance(field, models.ManyToManyField): try: - value = self.object_data['fields'][field.name] + value = self.object_data["fields"][field.name] except KeyError: continue target_model = get_base_model(field.related_model) @@ -676,7 +710,7 @@ def dependencies(self): deps = super().dependencies for field in self.model._meta.get_fields(): - val = self.object_data['fields'].get(field.name) + val = self.object_data["fields"].get(field.name) adapter = adapter_registry.get_field_adapter(field) if adapter: deps.update(adapter.get_dependencies(val)) @@ -688,10 +722,12 @@ def deletions(self, context): deletions = super().deletions(context) for field in self.model._meta.get_fields(): - val = self.object_data['fields'].get(field.name) + val = self.object_data["fields"].get(field.name) adapter = adapter_registry.get_field_adapter(field) if adapter: - deletions.update(adapter.get_object_deletions(self.instance, val, context)) + deletions.update( + adapter.get_object_deletions(self.instance, val, context) + ) return deletions @@ -709,12 +745,14 @@ def run(self, context): self._populate_many_to_many_fields(context) # record the UID for the newly created page - uid = context.uids_by_source[(self.base_model, self.object_data['pk'])] + uid = context.uids_by_source[(self.base_model, self.object_data["pk"])] get_locator_for_model(self.base_model).attach_uid(self.instance, uid) # Also add it to destination_ids_by_source mapping - source_pk = self.object_data['pk'] - context.destination_ids_by_source[(self.base_model, source_pk)] = 
self.instance.pk + source_pk = self.object_data["pk"] + context.destination_ids_by_source[ + (self.base_model, source_pk) + ] = self.instance.pk class CreateTreeModel(CreateModel): @@ -723,6 +761,7 @@ class CreateTreeModel(CreateModel): For example: Pages and Collections """ + def __init__(self, model, object_data, destination_parent_id=None): super().__init__(model, object_data) self.destination_parent_id = destination_parent_id @@ -733,7 +772,7 @@ def dependencies(self): if self.destination_parent_id is None: # need to ensure parent page is imported before this one deps.add( - (get_base_model(self.model), self.object_data['parent_id'], True), + (get_base_model(self.model), self.object_data["parent_id"], True), ) return deps @@ -742,8 +781,10 @@ def _save(self, context): if self.destination_parent_id is None: # The destination parent ID was not known at the time this operation was built, # but should now exist in the page ID mapping - source_parent_id = self.object_data['parent_id'] - self.destination_parent_id = context.destination_ids_by_source[(get_base_model(self.model), source_parent_id)] + source_parent_id = self.object_data["parent_id"] + self.destination_parent_id = context.destination_ids_by_source[ + (get_base_model(self.model), source_parent_id) + ] parent = get_base_model(self.model).objects.get(id=self.destination_parent_id) diff --git a/wagtail_transfer/richtext.py b/wagtail_transfer/richtext.py index 33b004c..77719c7 100644 --- a/wagtail_transfer/richtext.py +++ b/wagtail_transfer/richtext.py @@ -8,8 +8,8 @@ from .models import get_base_model -FIND_A_TAG = re.compile(r']*)>(.*?)') -FIND_EMBED_TAG = re.compile(r']*)/>') +FIND_A_TAG = re.compile(r"]*)>(.*?)") +FIND_EMBED_TAG = re.compile(r"]*)/>") FIND_ID = re.compile(r'id="([^"]*)"') @@ -22,7 +22,10 @@ class RichTextReferenceHandler: The tag matcher must be a compiled regular expression where the first matching group is the tag's body (ie its attributes) and the second the tag's inner contents (if 
any). Note this only works for tags which cannot be nested inside the same tag, so this works fine for eg matching tags since nested tags are illegal. """ - def __init__(self, handlers, tag_matcher, type_attribute, destination_ids_by_source={}): + + def __init__( + self, handlers, tag_matcher, type_attribute, destination_ids_by_source={} + ): self.handlers = handlers self.tag_matcher = tag_matcher self.type_attribute = type_attribute @@ -35,18 +38,22 @@ def update_tag_id(self, match, destination_ids_by_source): try: handler = self.handlers[attrs[self.type_attribute]] target_model = get_base_model(handler.get_model()) - new_id = destination_ids_by_source.get((target_model, int(attrs['id']))) + new_id = destination_ids_by_source.get((target_model, int(attrs["id"]))) if new_id is None: # Return the tag's inner contents, effectively removing the tag try: return match.group(2) except IndexError: # The tag has no inner content, return a blank string instead - return '' + return "" # Otherwise update the id and construct the new tag string new_tag_body = FIND_ID.sub(f'id="{str(new_id)}"', tag_body) tag_body_offset = match.start(0) - new_tag_string = match.group(0)[:(match.start(1)-tag_body_offset)] + new_tag_body + match.group(0)[(match.end(1)-tag_body_offset):] + new_tag_string = ( + match.group(0)[: (match.start(1) - tag_body_offset)] + + new_tag_body + + match.group(0)[(match.end(1) - tag_body_offset) :] + ) return new_tag_string except KeyError: # If the relevant handler cannot be found, don't update the tag id @@ -61,7 +68,7 @@ def get_objects(self, html): attrs = extract_attrs(match.group(1)) try: handler = self.handlers[attrs[self.type_attribute]] - objects.add((get_base_model(handler.get_model()), int(attrs['id']))) + objects.add((get_base_model(handler.get_model()), int(attrs["id"]))) except KeyError: # If no handler can be found, no object reference can be added. 
# This might occur when the link is a plain url @@ -73,11 +80,18 @@ def update_ids(self, html, destination_ids_by_source): if html is None: return None else: - return self.tag_matcher.sub(partial(self.update_tag_id, destination_ids_by_source=destination_ids_by_source), html) + return self.tag_matcher.sub( + partial( + self.update_tag_id, + destination_ids_by_source=destination_ids_by_source, + ), + html, + ) class MultiTypeRichTextReferenceHandler: """Handles retrieving object references and updating ids for several different kinds of tags in rich text""" + def __init__(self, handlers): self.handlers = handlers @@ -102,8 +116,10 @@ def get_reference_handler(): if not REFERENCE_HANDLER: embed_handlers = features.get_embed_types() link_handlers = features.get_link_types() - REFERENCE_HANDLER = MultiTypeRichTextReferenceHandler([ - RichTextReferenceHandler(link_handlers, FIND_A_TAG, 'linktype'), - RichTextReferenceHandler(embed_handlers, FIND_EMBED_TAG, 'embedtype') - ]) + REFERENCE_HANDLER = MultiTypeRichTextReferenceHandler( + [ + RichTextReferenceHandler(link_handlers, FIND_A_TAG, "linktype"), + RichTextReferenceHandler(embed_handlers, FIND_EMBED_TAG, "embedtype"), + ] + ) return REFERENCE_HANDLER diff --git a/wagtail_transfer/serializers.py b/wagtail_transfer/serializers.py index 7c29c20..f65bcec 100644 --- a/wagtail_transfer/serializers.py +++ b/wagtail_transfer/serializers.py @@ -18,10 +18,13 @@ def _get_subclasses_recurse(model): relations for select_related, adapted from https://github.com/jazzband/django-model-utils/blob/master/model_utils/managers.py """ - related_objects = [f for f in model._meta.get_fields() if isinstance(f, models.OneToOneRel)] + related_objects = [ + f for f in model._meta.get_fields() if isinstance(f, models.OneToOneRel) + ] rels = [ - rel for rel in related_objects + rel + for rel in related_objects if isinstance(rel.field, models.OneToOneField) and issubclass(rel.field.model, model) and model is not rel.field.model @@ -31,8 +34,7 @@ 
def _get_subclasses_recurse(model): subclasses = [] for rel in rels: for subclass in _get_subclasses_recurse(rel.field.model): - subclasses.append( - rel.get_accessor_name() + LOOKUP_SEP + subclass) + subclasses.append(rel.get_accessor_name() + LOOKUP_SEP + subclass) subclasses.append(rel.get_accessor_name()) return subclasses @@ -84,16 +86,22 @@ def __init__(self, model): continue # ignore primary keys (including MTI parent pointers) - if getattr(field, 'primary_key', False): + if getattr(field, "primary_key", False): continue adapter = adapter_registry.get_field_adapter(field) if adapter: - adapter_managed_fields = adapter_managed_fields + adapter.get_managed_fields() + adapter_managed_fields = ( + adapter_managed_fields + adapter.get_managed_fields() + ) field_adapters.append(adapter) - self.field_adapters = [adapter for adapter in field_adapters if adapter.name not in adapter_managed_fields] + self.field_adapters = [ + adapter + for adapter in field_adapters + if adapter.name not in adapter_managed_fields + ] def get_objects_by_ids(self, ids): """ @@ -113,9 +121,9 @@ def serialize_fields(self, instance): def serialize(self, instance): return { - 'model': self.model._meta.label_lower, - 'pk': instance.pk, - 'fields': self.serialize_fields(instance) + "model": self.model._meta.label_lower, + "pk": instance.pk, + "fields": self.serialize_fields(instance), } def get_object_references(self, instance): @@ -135,14 +143,14 @@ def get_objects_to_serialize(self, instance): class TreeModelSerializer(ModelSerializer): - ignored_fields = ['path', 'depth', 'numchild'] + ignored_fields = ["path", "depth", "numchild"] def serialize(self, instance): result = super().serialize(instance) if instance.is_root(): - result['parent_id'] = None + result["parent_id"] = None else: - result['parent_id'] = instance.get_parent().pk + result["parent_id"] = instance.get_parent().pk return result @@ -150,17 +158,25 @@ def get_object_references(self, instance): refs = 
super().get_object_references(instance) if not instance.is_root(): # add a reference for the parent ID - refs.add( - (self.base_model, instance.get_parent().pk) - ) + refs.add((self.base_model, instance.get_parent().pk)) return refs class PageSerializer(TreeModelSerializer): ignored_fields = TreeModelSerializer.ignored_fields + [ - 'url_path', 'content_type', 'draft_title', 'has_unpublished_changes', 'owner', - 'go_live_at', 'expire_at', 'expired', 'locked', 'first_published_at', 'last_published_at', - 'latest_revision_created_at', 'live_revision', + "url_path", + "content_type", + "draft_title", + "has_unpublished_changes", + "owner", + "go_live_at", + "expire_at", + "expired", + "locked", + "first_published_at", + "last_published_at", + "latest_revision_created_at", + "live_revision", ] def get_objects_by_ids(self, ids): @@ -182,7 +198,7 @@ def __init__(self): def _scan_for_serializers(self): serializers = dict(self.BASE_SERIALIZERS_BY_MODEL_CLASS) - for fn in hooks.get_hooks('register_custom_serializers'): + for fn in hooks.get_hooks("register_custom_serializers"): serializers.update(fn()) self.serializers_by_model_class = serializers diff --git a/wagtail_transfer/streamfield.py b/wagtail_transfer/streamfield.py index 28a1657..8ca4220 100644 --- a/wagtail_transfer/streamfield.py +++ b/wagtail_transfer/streamfield.py @@ -45,7 +45,9 @@ def update_object_ids(stream_block, stream, destination_ids_by_source): """Loops over list-of-dicts formatted StreamField (stream) to update object references. 
This format is used as opposed to the StreamChild object format to prevent ChooserBlocks trying to load nonexistent models with old ids upon to_python being called""" - update_ids = partial(update_ids_using_handler, destination_ids_by_source=destination_ids_by_source) + update_ids = partial( + update_ids_using_handler, destination_ids_by_source=destination_ids_by_source + ) stream_block_handler = get_block_handler(stream_block) try: updated_stream = stream_block_handler.map_over_json(stream, update_ids) @@ -81,7 +83,7 @@ def map_over_json(self, stream, func): """ value = func(self.block, stream) if self.block.required and value is None: - raise ValidationError('This block requires a value') + raise ValidationError("This block requires a value") return value @@ -91,16 +93,20 @@ def map_over_json(self, stream, func): new_block = self.block.child_block new_block_handler = get_block_handler(new_block) block_is_in_new_format = getattr( - self.block, - "_item_is_in_block_format", - lambda x: False + self.block, "_item_is_in_block_format", lambda x: False ) for element in stream: try: if block_is_in_new_format(element): # We are dealing with new-style ListBlock representation - new_value = new_block_handler.map_over_json(element['value'], func) - updated_stream.append({'type': element['type'], 'value': new_value, 'id': element['id']}) + new_value = new_block_handler.map_over_json(element["value"], func) + updated_stream.append( + { + "type": element["type"], + "value": new_value, + "id": element["id"], + } + ) else: new_value = new_block_handler.map_over_json(element, func) updated_stream.append(new_value) @@ -117,17 +123,19 @@ class StreamBlockHandler(BaseBlockHandler): def map_over_json(self, stream, func): updated_stream = [] for element in stream: - new_block = self.block.child_blocks.get(element['type']) + new_block = self.block.child_blocks.get(element["type"]) new_block_handler = get_block_handler(new_block) - new_stream = element['value'] + new_stream = 
element["value"] try: new_value = new_block_handler.map_over_json(new_stream, func) - updated_stream.append({'type': element['type'], 'value': new_value, 'id': element['id']}) + updated_stream.append( + {"type": element["type"], "value": new_value, "id": element["id"]} + ) except ValidationError: # Omit the block if a required field was left blank due to the import pass if self.block.required and not updated_stream: - raise ValidationError('This block requires a value') + raise ValidationError("This block requires a value") return updated_stream @property @@ -148,7 +156,9 @@ def map_over_json(self, stream, func): new_value = new_block_handler.map_over_json(new_stream, func) except ValidationError: if new_block.required: - raise ValidationError(f'This block requires a value for {new_block}') + raise ValidationError( + f"This block requires a value for {new_block}" + ) else: # If the new block isn't required, just set it to the empty value new_value = new_block_handler.empty_value @@ -172,7 +182,9 @@ def get_object_references(self, value): return set() def update_ids(self, value, destination_ids_by_source): - value = destination_ids_by_source.get((get_base_model(self.block.model_class), value)) + value = destination_ids_by_source.get( + (get_base_model(self.block.model_class), value) + ) return value diff --git a/wagtail_transfer/urls.py b/wagtail_transfer/urls.py index 37ccf78..0f223c6 100644 --- a/wagtail_transfer/urls.py +++ b/wagtail_transfer/urls.py @@ -8,14 +8,35 @@ from .vendor.wagtail_api_v2.router import WagtailAPIRouter -chooser_api = WagtailAPIRouter('wagtail_transfer_page_chooser_api') -chooser_api.register_endpoint('pages', views.PageChooserAPIViewSet) -chooser_api.register_endpoint('models', ModelsAPIViewSet) +chooser_api = WagtailAPIRouter("wagtail_transfer_page_chooser_api") +chooser_api.register_endpoint("pages", views.PageChooserAPIViewSet) +chooser_api.register_endpoint("models", ModelsAPIViewSet) urlpatterns = [ - re_path(r'^api/pages/(\d+)/$', 
views.pages_for_export, name='wagtail_transfer_pages'), - path('api/models//', views.models_for_export, name='wagtail_transfer_model'), - path('api/models///', views.models_for_export, name='wagtail_transfer_model_object'), - re_path(r'^api/objects/$', views.objects_for_export, name='wagtail_transfer_objects'), - re_path(r'^api/chooser/', (decorate_urlpatterns(chooser_api.get_urlpatterns(), check_get_digest_wrapper), chooser_api.url_namespace, chooser_api.url_namespace)), + re_path( + r"^api/pages/(\d+)/$", views.pages_for_export, name="wagtail_transfer_pages" + ), + path( + "api/models//", + views.models_for_export, + name="wagtail_transfer_model", + ), + path( + "api/models///", + views.models_for_export, + name="wagtail_transfer_model_object", + ), + re_path( + r"^api/objects/$", views.objects_for_export, name="wagtail_transfer_objects" + ), + re_path( + r"^api/chooser/", + ( + decorate_urlpatterns( + chooser_api.get_urlpatterns(), check_get_digest_wrapper + ), + chooser_api.url_namespace, + chooser_api.url_namespace, + ), + ), ] diff --git a/wagtail_transfer/views.py b/wagtail_transfer/views.py index ffa17d6..9d279be 100644 --- a/wagtail_transfer/views.py +++ b/wagtail_transfer/views.py @@ -27,15 +27,17 @@ def pages_for_export(request, root_page_id): - check_digest(str(root_page_id), request.GET.get('digest', '')) + check_digest(str(root_page_id), request.GET.get("digest", "")) root_page = get_object_or_404(Page, id=root_page_id) - pages = [root_page.specific] if request.GET.get('recursive', 'true') == 'false' else root_page.get_descendants(inclusive=True).specific() + pages = ( + [root_page.specific] + if request.GET.get("recursive", "true") == "false" + else root_page.get_descendants(inclusive=True).specific() + ) - ids_for_import = [ - ['wagtailcore.page', page.pk] for page in pages - ] + ids_for_import = [["wagtailcore.page", page.pk] for page in pages] objects = [] object_references = set() @@ -48,20 +50,23 @@ def pages_for_export(request, root_page_id): 
serializer = serializer_registry.get_model_serializer(type(model)) objects.append(serializer.serialize(model)) object_references.update(serializer.get_object_references(model)) - models_to_serialize.update(serializer.get_objects_to_serialize(model).difference(serialized_models)) + models_to_serialize.update( + serializer.get_objects_to_serialize(model).difference(serialized_models) + ) mappings = [] for model, pk in object_references: uid = get_locator_for_model(model).get_uid_for_local_id(pk) - mappings.append( - [model._meta.label_lower, pk, uid] - ) + mappings.append([model._meta.label_lower, pk, uid]) - return JsonResponse({ - 'ids_for_import': ids_for_import, - 'mappings': mappings, - 'objects': objects, - }, json_dumps_params={'indent': 2}) + return JsonResponse( + { + "ids_for_import": ids_for_import, + "mappings": mappings, + "objects": objects, + }, + json_dumps_params={"indent": 2}, + ) def models_for_export(request, model_path, object_id=None): @@ -70,10 +75,10 @@ def models_for_export(request, model_path, object_id=None): If an object_id is provided, search for a single model object. """ - check_digest(str(model_path), request.GET.get('digest', '')) + check_digest(str(model_path), request.GET.get("digest", "")) # 1. Confirm whether or not th model_path leads to a real model. - app_label, model_name = model_path.split('.') + app_label, model_name = model_path.split(".") Model = ContentType.objects.get_by_natural_key(app_label, model_name).model_class() if object_id is None: @@ -82,9 +87,7 @@ def models_for_export(request, model_path, object_id=None): model_objects = [Model.objects.get(pk=object_id)] # 2. If this was just a model and not a specific object, get all child IDs. 
- ids_for_import = [ - [model_path, obj.pk] for obj in model_objects - ] + ids_for_import = [[model_path, obj.pk] for obj in model_objects] objects = [] object_references = set() @@ -97,20 +100,23 @@ def models_for_export(request, model_path, object_id=None): serializer = serializer_registry.get_model_serializer(type(model)) objects.append(serializer.serialize(model)) object_references.update(serializer.get_object_references(model)) - models_to_serialize.update(serializer.get_objects_to_serialize(model).difference(serialized_models)) + models_to_serialize.update( + serializer.get_objects_to_serialize(model).difference(serialized_models) + ) mappings = [] for model, pk in object_references: uid = get_locator_for_model(model).get_uid_for_local_id(pk) - mappings.append( - [model._meta.label_lower, pk, uid] - ) + mappings.append([model._meta.label_lower, pk, uid]) - return JsonResponse({ - 'ids_for_import': ids_for_import, - 'mappings': mappings, - 'objects': objects, - }, json_dumps_params={'indent': 2}) + return JsonResponse( + { + "ids_for_import": ids_for_import, + "mappings": mappings, + "objects": objects, + }, + json_dumps_params={"indent": 2}, + ) @csrf_exempt @@ -125,9 +131,9 @@ def objects_for_export(request): and returns an API response with objects / mappings populated (but ids_for_import empty). 
""" - check_digest(request.body, request.GET.get('digest', '')) + check_digest(request.body, request.GET.get("digest", "")) - request_data = json.loads(request.body.decode('utf-8')) + request_data = json.loads(request.body.decode("utf-8")) objects = [] object_references = set() @@ -144,28 +150,36 @@ def objects_for_export(request): serializer = serializer_registry.get_model_serializer(type(instance)) objects.append(serializer.serialize(instance)) object_references.update(serializer.get_object_references(instance)) - models_to_serialize.update(serializer.get_objects_to_serialize(instance).difference(serialized_models)) + models_to_serialize.update( + serializer.get_objects_to_serialize(instance).difference( + serialized_models + ) + ) mappings = [] for model, pk in object_references: uid = get_locator_for_model(model).get_uid_for_local_id(pk) - mappings.append( - [model._meta.label_lower, pk, uid] - ) + mappings.append([model._meta.label_lower, pk, uid]) - return JsonResponse({ - 'ids_for_import': [], - 'mappings': mappings, - 'objects': objects, - }, json_dumps_params={'indent': 2}) + return JsonResponse( + { + "ids_for_import": [], + "mappings": mappings, + "objects": objects, + }, + json_dumps_params={"indent": 2}, + ) class UIDField(ReadOnlyField): """ Serializes UID for the Page Chooser API """ + def get_attribute(self, instance): - return get_locator_for_model(Page).get_uid_for_local_id(instance.id, create=False) + return get_locator_for_model(Page).get_uid_for_local_id( + instance.id, create=False + ) class TransferPageChooserSerializer(AdminPageSerializer): @@ -174,37 +188,39 @@ class TransferPageChooserSerializer(AdminPageSerializer): class PageChooserAPIViewSet(PagesAdminAPIViewSet): base_serializer_class = TransferPageChooserSerializer - meta_fields = PagesAdminAPIViewSet.meta_fields + [ - 'uid' - ] - listing_default_fields = PagesAdminAPIViewSet.listing_default_fields + [ - 'uid' - ] + meta_fields = PagesAdminAPIViewSet.meta_fields + ["uid"] + 
listing_default_fields = PagesAdminAPIViewSet.listing_default_fields + ["uid"] @permission_required( "wagtail_transfer.wagtailtransfer_can_import", login_url="wagtailadmin_login" ) def chooser_api_proxy(request, source_name, path): - source_config = getattr(settings, 'WAGTAILTRANSFER_SOURCES', {}).get(source_name) + source_config = getattr(settings, "WAGTAILTRANSFER_SOURCES", {}).get(source_name) - api_proxy_timeout_seconds = getattr(settings, 'WAGTAILTRANSFER_CHOOSER_API_PROXY_TIMEOUT', 5) + api_proxy_timeout_seconds = getattr( + settings, "WAGTAILTRANSFER_CHOOSER_API_PROXY_TIMEOUT", 5 + ) if source_config is None: raise Http404("Source does not exist") - default_chooser_endpoint = 'pages' - if 'models' in request.GET: - default_chooser_endpoint = 'models' + default_chooser_endpoint = "pages" + if "models" in request.GET: + default_chooser_endpoint = "models" - base_url = source_config['BASE_URL'] + f'api/chooser/{default_chooser_endpoint}/' + base_url = source_config["BASE_URL"] + f"api/chooser/{default_chooser_endpoint}/" message = request.GET.urlencode() digest = digest_for_source(source_name, message) - response = requests.get(f"{base_url}{path}?{message}&digest={digest}", headers={ - 'Accept': request.META['HTTP_ACCEPT'], - }, timeout=api_proxy_timeout_seconds) + response = requests.get( + f"{base_url}{path}?{message}&digest={digest}", + headers={ + "Accept": request.META["HTTP_ACCEPT"], + }, + timeout=api_proxy_timeout_seconds, + ) return HttpResponse(response.content, status=response.status_code) @@ -213,20 +229,31 @@ def chooser_api_proxy(request, source_name, path): "wagtail_transfer.wagtailtransfer_can_import", login_url="wagtailadmin_login" ) def choose_page(request): - return render(request, 'wagtail_transfer/choose_page.html', { - 'sources_data': json.dumps([ - { - 'value': source_name, - 'label': source_name, - 'page_chooser_api': reverse('wagtail_transfer_admin:chooser_api_proxy', args=[source_name, '']) - } - for source_name in getattr(settings, 
'WAGTAILTRANSFER_SOURCES', {}).keys() - ]), - }) + return render( + request, + "wagtail_transfer/choose_page.html", + { + "sources_data": json.dumps( + [ + { + "value": source_name, + "label": source_name, + "page_chooser_api": reverse( + "wagtail_transfer_admin:chooser_api_proxy", + args=[source_name, ""], + ), + } + for source_name in getattr( + settings, "WAGTAILTRANSFER_SOURCES", {} + ).keys() + ] + ), + }, + ) def import_missing_object_data(source, importer: ImportPlanner): - base_url = settings.WAGTAILTRANSFER_SOURCES[source]['BASE_URL'] + base_url = settings.WAGTAILTRANSFER_SOURCES[source]["BASE_URL"] while importer.missing_object_data: # convert missing_object_data from a set of (model_class, id) tuples # into a dict of {model_class_label: [list_of_ids]} @@ -234,15 +261,17 @@ def import_missing_object_data(source, importer: ImportPlanner): for model_class, source_id in importer.missing_object_data: missing_object_data_by_type[model_class].append(source_id) - request_data = json.dumps({ - model_class._meta.label_lower: ids - for model_class, ids in missing_object_data_by_type.items() - }) + request_data = json.dumps( + { + model_class._meta.label_lower: ids + for model_class, ids in missing_object_data_by_type.items() + } + ) digest = digest_for_source(source, request_data) # request the missing object data and add to the import plan response = requests.post( - f"{base_url}api/objects/", params={'digest': digest}, data=request_data + f"{base_url}api/objects/", params={"digest": digest}, data=request_data ) importer.add_json(response.content) importer.run() @@ -250,27 +279,32 @@ def import_missing_object_data(source, importer: ImportPlanner): def import_page(request): - source = request.POST['source'] - base_url = settings.WAGTAILTRANSFER_SOURCES[source]['BASE_URL'] - digest = digest_for_source(source, str(request.POST['source_page_id'])) - - response = requests.get(f"{base_url}api/pages/{request.POST['source_page_id']}/", params={'digest': digest}) - - 
dest_page_id = request.POST['dest_page_id'] or None - importer = ImportPlanner.for_page(source=request.POST['source_page_id'], destination=dest_page_id) + source = request.POST["source"] + base_url = settings.WAGTAILTRANSFER_SOURCES[source]["BASE_URL"] + digest = digest_for_source(source, str(request.POST["source_page_id"])) + + response = requests.get( + f"{base_url}api/pages/{request.POST['source_page_id']}/", + params={"digest": digest}, + ) + + dest_page_id = request.POST["dest_page_id"] or None + importer = ImportPlanner.for_page( + source=request.POST["source_page_id"], destination=dest_page_id + ) importer.add_json(response.content) importer = import_missing_object_data(source, importer) if dest_page_id: - return redirect('wagtailadmin_explore', dest_page_id) + return redirect("wagtailadmin_explore", dest_page_id) else: - return redirect('wagtailadmin_explore_root') + return redirect("wagtailadmin_explore_root") def import_model(request): - source = request.POST['source'] - model = request.POST['source_model'] - base_url = settings.WAGTAILTRANSFER_SOURCES[source]['BASE_URL'] + source = request.POST["source"] + model = request.POST["source_model"] + base_url = settings.WAGTAILTRANSFER_SOURCES[source]["BASE_URL"] digest = digest_for_source(source, model) url = f"{base_url}api/models/{model}/" @@ -278,14 +312,14 @@ def import_model(request): source_model_object_id = request.POST.get("source_model_object_id") url = f"{url}{source_model_object_id}/" - response = requests.get(url, params={'digest': digest}) + response = requests.get(url, params={"digest": digest}) importer = ImportPlanner.for_model(model=model) importer.add_json(response.content) importer = import_missing_object_data(source, importer) - messages.add_message(request, messages.SUCCESS, 'Snippet(s) successfully imported') - app_label, model_name = model.split('.') - return redirect(f'wagtailsnippets_{app_label}_{model_name}:list') + messages.add_message(request, messages.SUCCESS, "Snippet(s) 
successfully imported") + app_label, model_name = model.split(".") + return redirect(f"wagtailsnippets_{app_label}_{model_name}:list") @permission_required( @@ -293,10 +327,10 @@ def import_model(request): ) @require_POST def do_import(request): - post_type = request.POST.get('type', 'page') - if post_type == 'page': + post_type = request.POST.get("type", "page") + if post_type == "page": return import_page(request) - elif post_type == 'model': + elif post_type == "model": return import_model(request) @@ -305,8 +339,8 @@ def check_page_existence_for_uid(request): Check whether a page with the specified UID exists - used for checking whether a page has already been imported to the destination site """ - uid = request.GET.get('uid', '') + uid = request.GET.get("uid", "") locator = get_locator_for_model(Page) page_exists = bool(locator.find(uid)) result = status.HTTP_200_OK if page_exists else status.HTTP_404_NOT_FOUND - return HttpResponse('', status=result) + return HttpResponse("", status=result) diff --git a/wagtail_transfer/wagtail_hooks.py b/wagtail_transfer/wagtail_hooks.py index ecf3a38..da0b88d 100644 --- a/wagtail_transfer/wagtail_hooks.py +++ b/wagtail_transfer/wagtail_hooks.py @@ -7,10 +7,13 @@ from . 
import admin_urls -@hooks.register('register_admin_urls') +@hooks.register("register_admin_urls") def register_admin_urls(): return [ - re_path(r'^wagtail-transfer/', include(admin_urls, namespace='wagtail_transfer_admin')), + re_path( + r"^wagtail-transfer/", + include(admin_urls, namespace="wagtail_transfer_admin"), + ), ] @@ -24,9 +27,14 @@ def is_shown(self, request): ) -@hooks.register('register_admin_menu_item') +@hooks.register("register_admin_menu_item") def register_admin_menu_item(): - return WagtailTransferMenuItem('Import', reverse('wagtail_transfer_admin:choose_page'), classnames='icon icon-doc-empty-inverse', order=10000) + return WagtailTransferMenuItem( + "Import", + reverse("wagtail_transfer_admin:choose_page"), + classnames="icon icon-doc-empty-inverse", + order=10000, + ) @hooks.register("register_permissions") From b83aab8c4442b3e1cf95950a5b1fcbae0d7ad5b1 Mon Sep 17 00:00:00 2001 From: Joshua Munn Date: Wed, 25 Oct 2023 13:35:49 +0100 Subject: [PATCH 05/10] Second pass at ruff formatting --- .circleci/report_nightly_build_failure.py | 1 - runtests.py | 1 - setup.py | 1 - tests/settings.py | 1 - tests/tests/test_api.py | 2 -- tests/tests/test_import.py | 2 -- tests/tests/test_views.py | 1 - tests/urls.py | 1 - wagtail_transfer/admin_urls.py | 1 - wagtail_transfer/auth.py | 1 - wagtail_transfer/field_adapters.py | 2 -- wagtail_transfer/files.py | 2 -- wagtail_transfer/locators.py | 2 -- wagtail_transfer/management/commands/preseed_transfer_table.py | 1 - wagtail_transfer/operations.py | 2 -- wagtail_transfer/richtext.py | 2 -- wagtail_transfer/urls.py | 1 - wagtail_transfer/views.py | 2 -- 18 files changed, 26 deletions(-) diff --git a/.circleci/report_nightly_build_failure.py b/.circleci/report_nightly_build_failure.py index 77a9fbc..59c42df 100644 --- a/.circleci/report_nightly_build_failure.py +++ b/.circleci/report_nightly_build_failure.py @@ -7,7 +7,6 @@ import requests - if "SLACK_WEBHOOK_URL" in os.environ: print("Reporting to 
#nightly-build-failures slack channel") response = requests.post( diff --git a/runtests.py b/runtests.py index c3dd1c3..4dc6d8a 100755 --- a/runtests.py +++ b/runtests.py @@ -5,6 +5,5 @@ from django.core.management import execute_from_command_line - os.environ["DJANGO_SETTINGS_MODULE"] = "tests.settings" execute_from_command_line([sys.argv[0], "test"] + sys.argv[1:]) diff --git a/setup.py b/setup.py index e9b80c5..1e3ae75 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,6 @@ from setuptools import find_packages, setup - setup( name="wagtail-transfer", version="0.9.1", diff --git a/tests/settings.py b/tests/settings.py index d593a17..9b859ca 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -1,6 +1,5 @@ import os - BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) diff --git a/tests/tests/test_api.py b/tests/tests/test_api.py index 25c425b..ee0e22f 100644 --- a/tests/tests/test_api.py +++ b/tests/tests/test_api.py @@ -2,7 +2,6 @@ import os.path import shutil import uuid - from datetime import datetime, timezone from unittest import mock @@ -30,7 +29,6 @@ from wagtail_transfer.auth import digest_for_source from wagtail_transfer.models import IDMapping - # We could use settings.MEDIA_ROOT here, but this way we avoid clobbering a real media folder if we # ever run these tests with non-test settings for any reason TEST_MEDIA_DIR = os.path.join(os.path.join(settings.BASE_DIR, "test-media")) diff --git a/tests/tests/test_import.py b/tests/tests/test_import.py index a5448c7..2fed42b 100644 --- a/tests/tests/test_import.py +++ b/tests/tests/test_import.py @@ -1,7 +1,6 @@ import importlib import os.path import shutil - from datetime import datetime, timezone from unittest import mock @@ -31,7 +30,6 @@ from wagtail_transfer.models import IDMapping from wagtail_transfer.operations import ImportPlanner - # We could use settings.MEDIA_ROOT here, but this way we avoid clobbering a real media folder if we # ever run these tests with non-test settings 
for any reason TEST_MEDIA_DIR = os.path.join(os.path.join(settings.BASE_DIR, "test-media")) diff --git a/tests/tests/test_views.py b/tests/tests/test_views.py index 160949f..d1db6fd 100644 --- a/tests/tests/test_views.py +++ b/tests/tests/test_views.py @@ -1,5 +1,4 @@ import json - from datetime import date, datetime, timezone from unittest import mock diff --git a/tests/urls.py b/tests/urls.py index 49cc456..63ac19b 100644 --- a/tests/urls.py +++ b/tests/urls.py @@ -4,7 +4,6 @@ from wagtail_transfer import urls as wagtailtransfer_urls - urlpatterns = [ re_path(r"^admin/", include(wagtailadmin_urls)), re_path(r"^wagtail-transfer/", include(wagtailtransfer_urls)), diff --git a/wagtail_transfer/admin_urls.py b/wagtail_transfer/admin_urls.py index f403652..36f3d53 100644 --- a/wagtail_transfer/admin_urls.py +++ b/wagtail_transfer/admin_urls.py @@ -4,7 +4,6 @@ from .vendor.wagtail_api_v2.router import WagtailAPIRouter - chooser_api = WagtailAPIRouter("wagtail_transfer_admin:page_chooser_api") chooser_api.register_endpoint("pages", views.PageChooserAPIViewSet) diff --git a/wagtail_transfer/auth.py b/wagtail_transfer/auth.py index 595766a..b913b10 100644 --- a/wagtail_transfer/auth.py +++ b/wagtail_transfer/auth.py @@ -5,7 +5,6 @@ from django.conf import settings from django.core.exceptions import PermissionDenied - GROUP_QUERY_WITH_DIGEST = re.compile( "(?P.*?)&?digest=(?P[^&]*)(?P.*)" ) diff --git a/wagtail_transfer/field_adapters.py b/wagtail_transfer/field_adapters.py index 0f63686..5247ef7 100644 --- a/wagtail_transfer/field_adapters.py +++ b/wagtail_transfer/field_adapters.py @@ -1,6 +1,5 @@ import json import pathlib - from functools import lru_cache from urllib.parse import urlparse @@ -22,7 +21,6 @@ from .richtext import get_reference_handler from .streamfield import get_object_references, update_object_ids - WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS = getattr( settings, "WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS", diff --git a/wagtail_transfer/files.py 
b/wagtail_transfer/files.py index 4200de3..ea40baf 100644 --- a/wagtail_transfer/files.py +++ b/wagtail_transfer/files.py @@ -1,9 +1,7 @@ import hashlib - from contextlib import contextmanager import requests - from django.core.files.base import ContentFile from .models import ImportedFile diff --git a/wagtail_transfer/locators.py b/wagtail_transfer/locators.py index 7e4119d..1bbb8f2 100644 --- a/wagtail_transfer/locators.py +++ b/wagtail_transfer/locators.py @@ -5,7 +5,6 @@ """ import uuid - from functools import lru_cache from django.conf import settings @@ -15,7 +14,6 @@ from .models import IDMapping, get_base_model - UUID_SEQUENCE = 0 # dict of models that should be located by field values using FieldLocator, diff --git a/wagtail_transfer/management/commands/preseed_transfer_table.py b/wagtail_transfer/management/commands/preseed_transfer_table.py index b1d0e13..74dc41e 100644 --- a/wagtail_transfer/management/commands/preseed_transfer_table.py +++ b/wagtail_transfer/management/commands/preseed_transfer_table.py @@ -7,7 +7,6 @@ from wagtail_transfer.models import IDMapping, get_base_model, get_model_for_path - # Namespace UUID common to all wagtail-transfer installances, used with uuid5 to generate # a predictable UUID for any given model-name / PK combination NAMESPACE = uuid.UUID("418b5168-5a10-11ea-a84b-7831c1c42e66") diff --git a/wagtail_transfer/operations.py b/wagtail_transfer/operations.py index 3799f03..6df55ee 100644 --- a/wagtail_transfer/operations.py +++ b/wagtail_transfer/operations.py @@ -1,5 +1,4 @@ import json - from copy import copy from django.conf import settings @@ -14,7 +13,6 @@ from .locators import get_locator_for_model from .models import get_base_model, get_base_model_for_path, get_model_for_path - # Models which should be updated to their latest version when encountered in object references default_update_related_models = ["wagtailimages.image"] diff --git a/wagtail_transfer/richtext.py b/wagtail_transfer/richtext.py index 
77719c7..6cafde1 100644 --- a/wagtail_transfer/richtext.py +++ b/wagtail_transfer/richtext.py @@ -1,5 +1,4 @@ import re - from functools import partial from wagtail.rich_text import features @@ -7,7 +6,6 @@ from .models import get_base_model - FIND_A_TAG = re.compile(r"]*)>(.*?)") FIND_EMBED_TAG = re.compile(r"]*)/>") FIND_ID = re.compile(r'id="([^"]*)"') diff --git a/wagtail_transfer/urls.py b/wagtail_transfer/urls.py index 0f223c6..4531856 100644 --- a/wagtail_transfer/urls.py +++ b/wagtail_transfer/urls.py @@ -7,7 +7,6 @@ from .vendor.wagtail_api_v2.router import WagtailAPIRouter - chooser_api = WagtailAPIRouter("wagtail_transfer_page_chooser_api") chooser_api.register_endpoint("pages", views.PageChooserAPIViewSet) chooser_api.register_endpoint("models", ModelsAPIViewSet) diff --git a/wagtail_transfer/views.py b/wagtail_transfer/views.py index 9d279be..b944d64 100644 --- a/wagtail_transfer/views.py +++ b/wagtail_transfer/views.py @@ -1,9 +1,7 @@ import json - from collections import defaultdict import requests - from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import permission_required From c6cf87660d0e685121d8c9e62729c30c4641df07 Mon Sep 17 00:00:00 2001 From: Joshua Munn Date: Wed, 25 Oct 2023 13:36:17 +0100 Subject: [PATCH 06/10] Manual fixups for issues raised by ruff --- wagtail_transfer/files.py | 4 ++-- .../management/commands/preseed_transfer_table.py | 12 ++++++++---- wagtail_transfer/operations.py | 6 +++--- wagtail_transfer/richtext.py | 4 ++-- wagtail_transfer/streamfield.py | 4 ++-- 5 files changed, 17 insertions(+), 13 deletions(-) diff --git a/wagtail_transfer/files.py b/wagtail_transfer/files.py index ea40baf..2e839b0 100644 --- a/wagtail_transfer/files.py +++ b/wagtail_transfer/files.py @@ -18,7 +18,7 @@ def open_file(field, file): # First check if the file is stored on the local filesystem try: - file.path + file.path # noqa: B018 is_local = True except NotImplementedError: @@ -85,7 +85,7 
@@ def get_file_hash(field, instance): # Fall back to calculating it on the fly with open_file(field, field.value_from_object(instance)) as f: - return hashlib.sha1(f.read()).hexdigest() + return hashlib.sha1(f.read()).hexdigest() # noqa: S324 class FileTransferError(Exception): diff --git a/wagtail_transfer/management/commands/preseed_transfer_table.py b/wagtail_transfer/management/commands/preseed_transfer_table.py index 74dc41e..f876528 100644 --- a/wagtail_transfer/management/commands/preseed_transfer_table.py +++ b/wagtail_transfer/management/commands/preseed_transfer_table.py @@ -34,8 +34,10 @@ def handle(self, *args, **options): # interpret as a model try: model = get_model_for_path(label) - except ObjectDoesNotExist: - raise CommandError("%r is not recognised as a model name." % label) + except ObjectDoesNotExist as err: + raise CommandError( + "%r is not recognised as a model name." % label + ) from err if model != get_base_model(model): raise CommandError( @@ -48,8 +50,10 @@ def handle(self, *args, **options): # interpret label as an app try: app = apps.get_app_config(label) - except LookupError: - raise CommandError("%r is not recognised as an app label." % label) + except LookupError as err: + raise CommandError( + "%r is not recognised as an app label." 
% label + ) from err for model in app.get_models(): if model == get_base_model(model): diff --git a/wagtail_transfer/operations.py b/wagtail_transfer/operations.py index 6df55ee..089591e 100644 --- a/wagtail_transfer/operations.py +++ b/wagtail_transfer/operations.py @@ -441,7 +441,7 @@ def _handle_task(self, task): self.task_resolutions[task] = operation if operation is not None: - for model, source_id, is_hard_dep in operation.dependencies: + for model, source_id, _ in operation.dependencies: self._add_objective( Objective( model, @@ -571,11 +571,11 @@ def _add_to_operation_order(self, operation, operation_order, path): # If everything is working properly, this should be a case we already encountered # during task / objective solving and logged in failed_creations. - assert (dep_model, dep_source_id) in self.failed_creations + assert (dep_model, dep_source_id) in self.failed_creations # noqa: S101 # Also, it should be a soft dependency, since we've eliminated unsatisfiable hard # hard dependencies during _check_satisfiable. - assert not dep_is_hard + assert not dep_is_hard # noqa: S101 # Since this is a soft dependency, we can (and must!) leave it unsatisfied. 
# Abandon this dependency and move on to the next in the list diff --git a/wagtail_transfer/richtext.py b/wagtail_transfer/richtext.py index 6cafde1..5ddcbec 100644 --- a/wagtail_transfer/richtext.py +++ b/wagtail_transfer/richtext.py @@ -22,12 +22,12 @@ class RichTextReferenceHandler: """ def __init__( - self, handlers, tag_matcher, type_attribute, destination_ids_by_source={} + self, handlers, tag_matcher, type_attribute, destination_ids_by_source=None ): self.handlers = handlers self.tag_matcher = tag_matcher self.type_attribute = type_attribute - self.destination_ids_by_source = destination_ids_by_source + self.destination_ids_by_source = destination_ids_by_source or {} def update_tag_id(self, match, destination_ids_by_source): # Updates a specific tag's id from source to destination Wagtail instance, or removes the tag if no id mapping exists diff --git a/wagtail_transfer/streamfield.py b/wagtail_transfer/streamfield.py index 8ca4220..88f3ddb 100644 --- a/wagtail_transfer/streamfield.py +++ b/wagtail_transfer/streamfield.py @@ -154,11 +154,11 @@ def map_over_json(self, stream, func): new_stream = stream[key] try: new_value = new_block_handler.map_over_json(new_stream, func) - except ValidationError: + except ValidationError as err: if new_block.required: raise ValidationError( f"This block requires a value for {new_block}" - ) + ) from err else: # If the new block isn't required, just set it to the empty value new_value = new_block_handler.empty_value From 32c4a735f4f94700f89a12c839f28967b736926c Mon Sep 17 00:00:00 2001 From: Joshua Munn Date: Wed, 25 Oct 2023 13:47:33 +0100 Subject: [PATCH 07/10] Add pre-commit config --- .pre-commit-config.yaml | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..d059c78 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,32 @@ +exclude: | + (?x)( + 
wagtail_transfer.js + |document.txt + |.babelrc + ) +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-added-large-files + - id: check-case-conflict + - id: check-json + - id: check-merge-conflict + - id: check-symlinks + - id: check-toml + - id: check-yaml + args: ['--unsafe'] + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/adamchainz/blacken-docs + rev: 1.16.0 + hooks: + - id: blacken-docs + additional_dependencies: [black==23.10.0] + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: 'v0.1.2' + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - id: ruff-format + args: [--check] From 84f991d844dcbfc29e4ce9a0b0afb3846c7dcd06 Mon Sep 17 00:00:00 2001 From: Joshua Munn Date: Wed, 25 Oct 2023 13:48:04 +0100 Subject: [PATCH 08/10] Apply pre-commit fixes --- docs/img/wagtail_transfer_logo.svg | 2 +- docs/management_commands.md | 1 - docs/settings.md | 100 ++++++++++++++--------------- docs/setup.md | 25 ++++---- 4 files changed, 63 insertions(+), 65 deletions(-) diff --git a/docs/img/wagtail_transfer_logo.svg b/docs/img/wagtail_transfer_logo.svg index f041aeb..311fffd 100644 --- a/docs/img/wagtail_transfer_logo.svg +++ b/docs/img/wagtail_transfer_logo.svg @@ -1 +1 @@ - \ No newline at end of file + diff --git a/docs/management_commands.md b/docs/management_commands.md index 0f2060d..2a89aea 100644 --- a/docs/management_commands.md +++ b/docs/management_commands.md @@ -30,4 +30,3 @@ Suppose a site has been developed and populated with content on a staging enviro * On both instances, run: `./manage.py preseed_transfer_table wagtailcore.page --range=1-199` The `preseed_transfer_table` command generates consistent UUIDs between the two site instances, so any transfers involving this ID range will recognise the pages as matching, and handle them as updates rather than creations. 
- \ No newline at end of file diff --git a/docs/settings.md b/docs/settings.md index d922fca..4a0561a 100644 --- a/docs/settings.md +++ b/docs/settings.md @@ -5,24 +5,24 @@ ### `WAGTAILTRANSFER_SECRET_KEY` ```python -WAGTAILTRANSFER_SECRET_KEY = '7cd5de8229be75e1e0c2af8abc2ada7e' +WAGTAILTRANSFER_SECRET_KEY = "7cd5de8229be75e1e0c2af8abc2ada7e" ``` -The secret key used to authenticate requests to import content from this site to another. The secret key in the -matching part of the importing site's `WAGTAILTRANSFER_SOURCES` must be identical, or the transfer will be rejected - -this prevents unauthorised import of sensitive data. +The secret key used to authenticate requests to import content from this site to another. The secret key in the +matching part of the importing site's `WAGTAILTRANSFER_SOURCES` must be identical, or the transfer will be rejected - +this prevents unauthorised import of sensitive data. ### `WAGTAILTRANSFER_SOURCES` ```python WAGTAILTRANSFER_SOURCES = { - 'staging': { - 'BASE_URL': 'https://staging.example.com/wagtail-transfer/', - 'SECRET_KEY': '4ac4822149691395773b2a8942e1a472', + "staging": { + "BASE_URL": "https://staging.example.com/wagtail-transfer/", + "SECRET_KEY": "4ac4822149691395773b2a8942e1a472", }, - 'production': { - 'BASE_URL': 'https://www.example.com/wagtail-transfer/', - 'SECRET_KEY': 'a36476ffc6af34dc935570d97369eca0', + "production": { + "BASE_URL": "https://www.example.com/wagtail-transfer/", + "SECRET_KEY": "a36476ffc6af34dc935570d97369eca0", }, } ``` @@ -32,44 +32,44 @@ A dictionary defining the sites available to import from, and their secret keys. 
### `WAGTAILTRANSFER_UPDATE_RELATED_MODELS` ```python -WAGTAILTRANSFER_UPDATE_RELATED_MODELS = ['wagtailimages.image', 'adverts.advert'] +WAGTAILTRANSFER_UPDATE_RELATED_MODELS = ["wagtailimages.image", "adverts.advert"] ``` -Specifies a list of models that, whenever we encounter references to them in imported content, should be updated to the +Specifies a list of models that, whenever we encounter references to them in imported content, should be updated to the latest version from the source site as part of the import. -Whenever an object being imported contains a reference to a related object (through a ForeignKey, RichTextField or -StreamField), the 'importance' of that related object will tend to vary according to its type. For example, a reference -to an Image object within a page usually means that the image will be shown on that page; in this case, the Image model -is sufficiently important to the imported page that we want the importer to not only ensure that image exists at the -destination, but is updated to its newest version as well. Contrast this with the example of an 'author' snippet -attached to blog posts, containing various fields of data about that person (e.g. bio, social media links); in this -case, the author information is not really part of the blog post, and it's not expected that we would update it when +Whenever an object being imported contains a reference to a related object (through a ForeignKey, RichTextField or +StreamField), the 'importance' of that related object will tend to vary according to its type. For example, a reference +to an Image object within a page usually means that the image will be shown on that page; in this case, the Image model +is sufficiently important to the imported page that we want the importer to not only ensure that image exists at the +destination, but is updated to its newest version as well. 
Contrast this with the example of an 'author' snippet +attached to blog posts, containing various fields of data about that person (e.g. bio, social media links); in this +case, the author information is not really part of the blog post, and it's not expected that we would update it when running an import of blog posts. ### `WAGTAILTRANSFER_LOOKUP_FIELDS` ```python -WAGTAILTRANSFER_LOOKUP_FIELDS = {'blog.author': ['first_name', 'surname']} +WAGTAILTRANSFER_LOOKUP_FIELDS = {"blog.author": ["first_name", "surname"]} ``` Specifies a list of fields to use for object lookups on the given models. -Normally, imported objects will be assigned a random UUID known across all sites, so that those objects can be -recognised on subsequent imports and be updated rather than creating a duplicate. This behaviour is less useful for -models that already have a uniquely identifying field, or set of fields, such as an author identified by first name -and surname - if the same author exists on the source and destination site, but this was not the result of a previous -import, then the UUID-based matching will consider them distinct, and attempt to create a duplicate author record at the -destination. Adding an entry in WAGTAILTRANSFER_LOOKUP_FIELDS will mean that any imported instances of the given model +Normally, imported objects will be assigned a random UUID known across all sites, so that those objects can be +recognised on subsequent imports and be updated rather than creating a duplicate. This behaviour is less useful for +models that already have a uniquely identifying field, or set of fields, such as an author identified by first name +and surname - if the same author exists on the source and destination site, but this was not the result of a previous +import, then the UUID-based matching will consider them distinct, and attempt to create a duplicate author record at the +destination. 
Adding an entry in WAGTAILTRANSFER_LOOKUP_FIELDS will mean that any imported instances of the given model will be looked up based on the specified fields, rather than by UUID. The default value for `WAGTAILTRANSFER_LOOKUP_FIELDS` is: ```python { - 'taggit.tag': ['slug'], - 'wagtailcore.locale': ["language_code"], - 'contenttypes.contenttype': ['app_label', 'model'], + "taggit.tag": ["slug"], + "wagtailcore.locale": ["language_code"], + "contenttypes.contenttype": ["app_label", "model"], } ``` @@ -78,31 +78,33 @@ Overriding these values may result in issues as described above, particularly in ### `WAGTAILTRANSFER_NO_FOLLOW_MODELS` ```python -WAGTAILTRANSFER_NO_FOLLOW_MODELS = ['wagtailcore.page', 'organisations.Company'] +WAGTAILTRANSFER_NO_FOLLOW_MODELS = ["wagtailcore.page", "organisations.Company"] ``` -Specifies a list of models that should not be imported by association when they are referenced from imported content. +Specifies a list of models that should not be imported by association when they are referenced from imported content. Defaults to `['wagtailcore.page', 'contenttypes.contenttype']`. -By default, objects referenced within imported content will be recursively imported to ensure that those references are -still valid on the destination site. However, this is not always desirable - for example, if this happened for the Page -model, this would imply that any pages linked from an imported page would get imported as well, along with any pages -linked from those pages, and so on, leading to an unpredictable number of extra pages being added anywhere in the page -tree as a side-effect of the import. Models listed in WAGTAILTRANSFER_NO_FOLLOW_MODELS will thus be skipped in this -process, leaving the reference unresolved. 
The effect this has on the referencing page will vary according to the kind -of relation: nullable foreign keys, one-to-many and many-to-many relations will simply omit the missing object; -references in rich text and StreamField will become broken links (just as linking a page and then deleting it would); -while non-nullable foreign keys will prevent the object from being created at all (meaning that any objects referencing +By default, objects referenced within imported content will be recursively imported to ensure that those references are +still valid on the destination site. However, this is not always desirable - for example, if this happened for the Page +model, this would imply that any pages linked from an imported page would get imported as well, along with any pages +linked from those pages, and so on, leading to an unpredictable number of extra pages being added anywhere in the page +tree as a side-effect of the import. Models listed in WAGTAILTRANSFER_NO_FOLLOW_MODELS will thus be skipped in this +process, leaving the reference unresolved. The effect this has on the referencing page will vary according to the kind +of relation: nullable foreign keys, one-to-many and many-to-many relations will simply omit the missing object; +references in rich text and StreamField will become broken links (just as linking a page and then deleting it would); +while non-nullable foreign keys will prevent the object from being created at all (meaning that any objects referencing that object will end up with unresolved references, to be handled by the same set of rules). -Note that these settings do not accept models that are defined as subclasses through multi-table inheritance - in +Note that these settings do not accept models that are defined as subclasses through multi-table inheritance - in particular, they cannot be used to define behaviour that only applies to specific subclasses of Page. 
### `WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS` ```python -WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS = [('wagtailimages.image', 'tagged_items', True)] +WAGTAILTRANSFER_FOLLOWED_REVERSE_RELATIONS = [ + ("wagtailimages.image", "tagged_items", True) +] ``` Specifies a list of models, their reverse relations to follow, and whether deletions should be synced, when identifying object references that should be imported to the destination site. Defaults to `[('wagtailimages.image', 'tagged_items', True)]`. @@ -145,10 +147,9 @@ class MyCustomAdapter(FieldAdapter): pass -@hooks.register('register_field_adapters') +@hooks.register("register_field_adapters") def register_my_custom_adapter(): return {models.Field: MyCustomAdapter} - ``` @@ -168,18 +169,17 @@ from wagtail_transfer.serializers import PageSerializer from myapp.models import MyModel -class MyModelCustomSerializer(PageSerializer): +class MyModelCustomSerializer(PageSerializer): ignored_fields = PageSerializer.ignored_fields + [ - 'secret_field_1', - 'environment_specific_data_field_123', - ... + "secret_field_1", + "environment_specific_data_field_123", + ..., ] pass -@hooks.register('register_custom_serializers') +@hooks.register("register_custom_serializers") def register_my_custom_serializer(): return {MyModel: MyModelCustomSerializer} - ``` diff --git a/docs/setup.md b/docs/setup.md index 1e4bb01..eb4c783 100644 --- a/docs/setup.md +++ b/docs/setup.md @@ -7,13 +7,13 @@ 3. In your project's top-level urls.py, add: from wagtail_transfer import urls as wagtailtransfer_urls - + and add: url(r'^wagtail-transfer/', include(wagtailtransfer_urls)), - + to the `urlpatterns` list above `include(wagtail_urls)`. - + 4. Add the settings `WAGTAILTRANSFER_SOURCES` and `WAGTAILTRANSFER_SECRET_KEY` to your project settings. 
These are formatted as: @@ -29,20 +29,19 @@ } WAGTAILTRANSFER_SECRET_KEY = '7cd5de8229be75e1e0c2af8abc2ada7e' - + However, it is best to store the `SECRET_KEY`s themselves in local environment variables for security. - + `WAGTAILTRANSFER_SOURCES` is a dictionary defining the sites available to import from, and their secret keys. - `WAGTAILTRANSFER_SECRET_KEY` and the per-source `SECRET_KEY` settings are used to authenticate the communication between the - source and destination instances; this prevents unauthorised users from using this API to retrieve sensitive data such - as password hashes. The `SECRET_KEY` for each entry in `WAGTAILTRANSFER_SOURCES` must match that instance's + `WAGTAILTRANSFER_SECRET_KEY` and the per-source `SECRET_KEY` settings are used to authenticate the communication between the + source and destination instances; this prevents unauthorised users from using this API to retrieve sensitive data such + as password hashes. The `SECRET_KEY` for each entry in `WAGTAILTRANSFER_SOURCES` must match that instance's `WAGTAILTRANSFER_SECRET_KEY`. - -Once you've followed these instructions for all your source and destination sites, you can start -[importing content](basic_usage.md). -If you need additional configuration - you want to configure which referenced models are updated, how models are identified +Once you've followed these instructions for all your source and destination sites, you can start +[importing content](basic_usage.md). + +If you need additional configuration - you want to configure which referenced models are updated, how models are identified between Wagtail instances, or which models are pulled in and imported from references on an imported page, you can check out [how mappings and references work](how_it_works.md) and the [settings reference](settings.md). 
-

From ccea827e3b7a1fd2664ed79f541be01d47dfb16e Mon Sep 17 00:00:00 2001
From: Joshua Munn
Date: Wed, 25 Oct 2023 14:01:50 +0100
Subject: [PATCH 09/10] Add .git-blame-ignore-revs

---
 .git-blame-ignore-revs | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 .git-blame-ignore-revs

diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000..50e5759
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,6 @@
+# Set up ruff and pre-commit, apply fixes
+84f991d844dcbfc29e4ce9a0b0afb3846c7dcd06
+c6cf87660d0e685121d8c9e62729c30c4641df07
+b83aab8c4442b3e1cf95950a5b1fcbae0d7ad5b1
+e166089173b2337803ba17cdff051d24b4b0515e
+ca3b4000068d19e736149a038b0d479e5e78b8e9

From 3319c8843846010f581b6211f7d05d5c954adeba Mon Sep 17 00:00:00 2001
From: Joshua Munn
Date: Wed, 25 Oct 2023 14:04:50 +0100
Subject: [PATCH 10/10] Add lint job to github test workflow

---
 .github/workflows/test.yml | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 799ddc1..396ffc6 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -9,6 +9,17 @@ on:
   pull_request:

 jobs:
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.11"
+      - uses: pre-commit/action@v3.0.0
+
   test:
     runs-on: ubuntu-latest
     strategy: