From b97d25f4e38404eaef3282282ff8295e50d8b973 Mon Sep 17 00:00:00 2001
From: Nigel Gott <nigel@baserow.io>
Date: Thu, 6 Jul 2023 17:10:51 +0100
Subject: [PATCH] Ensure templates and snapshots don't have tsvs made to save
 disk space.

---
 .../contrib/builder/application_types.py      |  10 +-
 .../contrib/builder/domains/handler.py        |   9 +-
 .../contrib/database/airtable/handler.py      |   8 ++
 .../contrib/database/application_types.py     |  60 +++++-----
 .../contrib/database/fields/field_types.py    |  31 +++++-
 .../contrib/database/fields/registries.py     |   7 +-
 .../contrib/database/search/handler.py        |   3 +-
 .../baserow/contrib/database/table/handler.py |   9 +-
 backend/src/baserow/core/handler.py           |  43 +++++---
 .../commands/export_workspace_applications.py |   6 +
 .../commands/import_workspace_applications.py |   7 +-
 backend/src/baserow/core/registries.py        | 103 ++++++++----------
 backend/src/baserow/core/snapshots/handler.py |  15 ++-
 .../builder/test_builder_application_type.py  |   8 +-
 .../database/field/test_count_field_type.py   |  10 +-
 .../field/test_created_on_field_type.py       |   6 +-
 .../database/field/test_date_field_type.py    |   7 +-
 .../database/field/test_field_types.py        |  14 ++-
 .../database/field/test_file_field_type.py    |  10 +-
 .../field/test_last_modified_field_type.py    |   7 +-
 .../field/test_link_row_field_type.py         |   7 +-
 .../database/field/test_lookup_field_type.py  |  18 ++-
 .../test_multiple_collaborators_field_type.py |   6 +-
 .../field/test_multiple_select_field_type.py  |  13 ++-
 .../database/field/test_number_field_type.py  |   7 +-
 .../database/field/test_rollup_field_type.py  |   6 +-
 .../field/test_single_select_field_type.py    |  18 ++-
 .../test_database_application_type.py         |  12 +-
 .../tests/baserow/core/test_core_handler.py   |   8 +-
 .../tests/baserow/core/test_core_registry.py  |  36 ------
 .../src/baserow_enterprise/structure_types.py |  19 +++-
 .../test_registries.py                        |  26 ++++-
 32 files changed, 336 insertions(+), 213 deletions(-)

diff --git a/backend/src/baserow/contrib/builder/application_types.py b/backend/src/baserow/contrib/builder/application_types.py
index c5176f135..fa26ef9fe 100755
--- a/backend/src/baserow/contrib/builder/application_types.py
+++ b/backend/src/baserow/contrib/builder/application_types.py
@@ -18,7 +18,7 @@ from baserow.contrib.builder.types import BuilderDict, PageDict
 from baserow.contrib.database.constants import IMPORT_SERIALIZED_IMPORTING
 from baserow.core.db import specific_iterator
 from baserow.core.models import Application, Workspace
-from baserow.core.registries import ApplicationType, BaserowImportExportMode
+from baserow.core.registries import ApplicationType, ImportExportConfig
 from baserow.core.utils import ChildProgressBuilder
 
 
@@ -86,11 +86,9 @@ class BuilderApplicationType(ApplicationType):
     def export_serialized(
         self,
         builder: Builder,
+        import_export_config: ImportExportConfig,
         files_zip: Optional[ZipFile] = None,
         storage: Optional[Storage] = None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK,
     ) -> BuilderDict:
         """
         Exports the builder application type to a serialized format that can later
@@ -104,7 +102,7 @@ class BuilderApplicationType(ApplicationType):
         serialized_pages = self.export_pages_serialized(pages, files_zip, storage)
 
         serialized = super().export_serialized(
-            builder, files_zip, storage, baserow_import_export_mode
+            builder, import_export_config, files_zip, storage
         )
 
         return BuilderDict(pages=serialized_pages, **serialized)
@@ -194,6 +192,7 @@ class BuilderApplicationType(ApplicationType):
         self,
         workspace: Workspace,
         serialized_values: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         id_mapping: Dict[str, Any],
         files_zip: Optional[ZipFile] = None,
         storage: Optional[Storage] = None,
@@ -212,6 +211,7 @@ class BuilderApplicationType(ApplicationType):
         application = super().import_serialized(
             workspace,
             serialized_values,
+            import_export_config,
             id_mapping,
             files_zip,
             storage,
diff --git a/backend/src/baserow/contrib/builder/domains/handler.py b/backend/src/baserow/contrib/builder/domains/handler.py
index 519318815..73479b806 100644
--- a/backend/src/baserow/contrib/builder/domains/handler.py
+++ b/backend/src/baserow/contrib/builder/domains/handler.py
@@ -12,7 +12,7 @@ from baserow.contrib.builder.domains.models import Domain
 from baserow.contrib.builder.exceptions import BuilderDoesNotExist
 from baserow.contrib.builder.models import Builder
 from baserow.core.exceptions import IdDoesNotExist
-from baserow.core.registries import application_type_registry
+from baserow.core.registries import ImportExportConfig, application_type_registry
 from baserow.core.trash.handler import TrashHandler
 from baserow.core.utils import Progress
 
@@ -163,8 +163,12 @@ class DomainHandler:
 
         builder_application_type = application_type_registry.get("builder")
 
+        import_export_config = ImportExportConfig(
+            include_permission_data=True, reduce_disk_space_usage=False
+        )
+
         exported_builder = builder_application_type.export_serialized(
-            builder, None, default_storage
+            builder, import_export_config, None, default_storage
         )
 
         progress.increment(by=50)
@@ -173,6 +177,7 @@ class DomainHandler:
         duplicate_builder = builder_application_type.import_serialized(
             None,
             exported_builder,
+            import_export_config,
             id_mapping,
             None,
             default_storage,
diff --git a/backend/src/baserow/contrib/database/airtable/handler.py b/backend/src/baserow/contrib/database/airtable/handler.py
index 687a22c38..37949bdcd 100644
--- a/backend/src/baserow/contrib/database/airtable/handler.py
+++ b/backend/src/baserow/contrib/database/airtable/handler.py
@@ -32,6 +32,7 @@ from baserow.contrib.database.views.registries import view_type_registry
 from baserow.core.export_serialized import CoreExportSerializedStructure
 from baserow.core.handler import CoreHandler
 from baserow.core.models import Workspace
+from baserow.core.registries import ImportExportConfig
 from baserow.core.utils import ChildProgressBuilder, remove_invalid_surrogate_characters
 
 from .exceptions import AirtableBaseNotPublic, AirtableShareIsNotABase
@@ -608,11 +609,18 @@ class AirtableHandler:
             download_files_buffer,
         )
 
+        import_export_config = ImportExportConfig(
+            # We are not yet downloading any role/permission data from airtable so
+            # nothing to import
+            include_permission_data=False,
+            reduce_disk_space_usage=False,
+        )
         # Import the converted data using the existing method to avoid duplicate code.
         databases, _ = CoreHandler().import_applications_to_workspace(
             workspace,
             [baserow_database_export],
             files_buffer,
+            import_export_config,
             storage=storage,
             progress_builder=progress.create_child_builder(represents_progress=600),
         )
diff --git a/backend/src/baserow/contrib/database/application_types.py b/backend/src/baserow/contrib/database/application_types.py
index ce86ede2e..42e87ad5f 100755
--- a/backend/src/baserow/contrib/database/application_types.py
+++ b/backend/src/baserow/contrib/database/application_types.py
@@ -23,7 +23,7 @@ from baserow.contrib.database.views.registries import view_type_registry
 from baserow.core.models import Application, Workspace
 from baserow.core.registries import (
     ApplicationType,
-    BaserowImportExportMode,
+    ImportExportConfig,
     serialization_processor_registry,
 )
 from baserow.core.trash.handler import TrashHandler
@@ -70,11 +70,9 @@ class DatabaseApplicationType(ApplicationType):
     def export_tables_serialized(
         self,
         tables: List[Table],
+        import_export_config: ImportExportConfig,
         files_zip: Optional[ZipFile] = None,
         storage: Optional[Storage] = None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK,
     ) -> List[Dict[str, Any]]:
         """
         Exports the tables provided  to a serialized format that can later be
@@ -127,26 +125,22 @@ class DatabaseApplicationType(ApplicationType):
                 views=serialized_views,
                 rows=serialized_rows,
             )
-            # Annotate any `SerializationProcessorType` we have.
-            for (
-                serialized_structure
-            ) in serialization_processor_registry.get_all_for_mode(
-                baserow_import_export_mode
-            ):
-                structure.update(
-                    **serialized_structure.export_serialized(workspace, table)
+
+            for serialized_structure in serialization_processor_registry.get_all():
+                extra_data = serialized_structure.export_serialized(
+                    workspace, table, import_export_config
                 )
+                if extra_data is not None:
+                    structure.update(**extra_data)
             serialized_tables.append(structure)
         return serialized_tables
 
     def export_serialized(
         self,
         database: Database,
+        import_export_config: ImportExportConfig,
         files_zip: Optional[ZipFile] = None,
         storage: Optional[Storage] = None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK,
     ) -> Dict[str, Any]:
         """
         Exports the database application type to a serialized format that can later
@@ -161,11 +155,11 @@ class DatabaseApplicationType(ApplicationType):
         )
 
         serialized_tables = self.export_tables_serialized(
-            tables, files_zip, storage, baserow_import_export_mode
+            tables, import_export_config, files_zip, storage
         )
 
         serialized = super().export_serialized(
-            database, files_zip, storage, baserow_import_export_mode
+            database, import_export_config, files_zip, storage
         )
         serialized.update(
             **DatabaseExportSerializedStructure.database(tables=serialized_tables)
@@ -224,13 +218,11 @@ class DatabaseApplicationType(ApplicationType):
         database: Database,
         serialized_tables: List[Dict[str, Any]],
         id_mapping: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         files_zip: Optional[ZipFile] = None,
         storage: Optional[Storage] = None,
         progress_builder: Optional[ChildProgressBuilder] = None,
         external_table_fields_to_import: List[Tuple[Table, Dict[str, Any]]] = None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK,
     ) -> List[Table]:
         """
         Imports tables exported by the `export_tables_serialized` method. Look at
@@ -252,9 +244,8 @@ class DatabaseApplicationType(ApplicationType):
             field to import.
             Useful for when importing a single table which also needs to add related
             fields to other existing tables in the database.
-        :param baserow_import_export_mode: defines which Baserow import/export mode to
-            use, defaults to `TARGETING_SAME_WORKSPACE_NEW_PK`.
-        :type baserow_import_export_mode: enum
+        :param import_export_config: provides configuration options for the
+            import/export process to customize how it works.
         :return: The list of created tables
         """
 
@@ -302,6 +293,7 @@ class DatabaseApplicationType(ApplicationType):
                 field_instance = field_type.import_serialized(
                     serialized_table["_object"],
                     serialized_field,
+                    import_export_config,
                     id_mapping,
                     deferred_fk_update_collector,
                 )
@@ -315,6 +307,7 @@ class DatabaseApplicationType(ApplicationType):
             external_field = field_type.import_serialized(
                 external_table,
                 serialized_field,
+                import_export_config,
                 id_mapping,
                 deferred_fk_update_collector,
             )
@@ -494,14 +487,13 @@ class DatabaseApplicationType(ApplicationType):
         source_workspace = Workspace.objects.get(pk=id_mapping["import_workspace_id"])
         for serialized_table in serialized_tables:
             table = serialized_table["_object"]
-            SearchHandler.entire_field_values_changed_or_created(table)
+            if not import_export_config.reduce_disk_space_usage:
+                SearchHandler.entire_field_values_changed_or_created(table)
             for (
-                serialized_structure
-            ) in serialization_processor_registry.get_all_for_mode(
-                baserow_import_export_mode
-            ):
-                serialized_structure.import_serialized(
-                    source_workspace, table, serialized_table
+                serialized_structure_processor
+            ) in serialization_processor_registry.get_all():
+                serialized_structure_processor.import_serialized(
+                    source_workspace, table, serialized_table, import_export_config
                 )
 
         return imported_tables
@@ -510,13 +502,11 @@ class DatabaseApplicationType(ApplicationType):
         self,
         workspace: Workspace,
         serialized_values: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         id_mapping: Dict[str, Any],
         files_zip: Optional[ZipFile] = None,
         storage: Optional[Storage] = None,
         progress_builder: Optional[ChildProgressBuilder] = None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK,
     ) -> Application:
         """
         Imports a database application exported by the `export_serialized` method.
@@ -530,11 +520,11 @@ class DatabaseApplicationType(ApplicationType):
         application = super().import_serialized(
             workspace,
             serialized_values,
+            import_export_config,
             id_mapping,
             files_zip,
             storage,
             progress.create_child_builder(represents_progress=database_progress),
-            baserow_import_export_mode=baserow_import_export_mode,
         )
 
         database = application.specific
@@ -546,10 +536,10 @@ class DatabaseApplicationType(ApplicationType):
                 database,
                 serialized_values["tables"],
                 id_mapping,
+                import_export_config,
                 files_zip,
                 storage,
                 progress.create_child_builder(represents_progress=table_progress),
-                baserow_import_export_mode=baserow_import_export_mode,
             )
 
         return database
diff --git a/backend/src/baserow/contrib/database/fields/field_types.py b/backend/src/baserow/contrib/database/fields/field_types.py
index 91db5eadf..1cae899ca 100755
--- a/backend/src/baserow/contrib/database/fields/field_types.py
+++ b/backend/src/baserow/contrib/database/fields/field_types.py
@@ -83,12 +83,14 @@ from baserow.contrib.database.validators import UnicodeRegexValidator
 from baserow.core.fields import SyncedDateTimeField
 from baserow.core.handler import CoreHandler
 from baserow.core.models import UserFile, WorkspaceUser
+from baserow.core.registries import ImportExportConfig
 from baserow.core.user_files.exceptions import UserFileDoesNotExist
 from baserow.core.user_files.handler import UserFileHandler
 from baserow.core.utils import list_to_comma_separated_string
 from baserow.formula import BaserowFormulaException
 from baserow.formula.exceptions import FormulaFunctionTypeDoesNotExist
 
+from ..search.handler import SearchHandler
 from .constants import UPSERT_OPTION_DICT_KEY
 from .deferred_field_fk_updater import DeferredFieldFkUpdater
 from .dependencies.exceptions import (
@@ -2002,6 +2004,7 @@ class LinkRowFieldType(FieldType):
         self,
         table: "Table",
         serialized_values: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         id_mapping: Dict[str, Any],
         deferred_fk_update_collector: DeferredFieldFkUpdater,
     ) -> Optional[Field]:
@@ -2034,7 +2037,11 @@ class LinkRowFieldType(FieldType):
             serialized_copy["link_row_relation_id"] = related_field.link_row_relation_id
 
         field = super().import_serialized(
-            table, serialized_copy, id_mapping, deferred_fk_update_collector
+            table,
+            serialized_copy,
+            import_export_config,
+            id_mapping,
+            deferred_fk_update_collector,
         )
 
         if related_field_found:
@@ -3498,6 +3505,7 @@ class FormulaFieldType(ReadOnlyFieldType):
 
         if should_send_signals_at_end:
             update_collector.apply_updates_and_get_updated_fields(field_cache)
+            SearchHandler().entire_field_values_changed_or_created(field.table, [field])
             update_collector.send_force_refresh_signals_for_all_updated_tables()
 
     def row_of_dependency_updated(
@@ -3810,6 +3818,7 @@ class CountFieldType(FormulaFieldType):
         self,
         table: "Table",
         serialized_values: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         id_mapping: Dict[str, Any],
         deferred_fk_update_collector: DeferredFieldFkUpdater,
     ) -> "Field":
@@ -3819,7 +3828,11 @@ class CountFieldType(FormulaFieldType):
         # the mapping.
         original_through_field_id = serialized_copy.pop("through_field_id")
         field = super().import_serialized(
-            table, serialized_copy, id_mapping, deferred_fk_update_collector
+            table,
+            serialized_copy,
+            import_export_config,
+            id_mapping,
+            deferred_fk_update_collector,
         )
         deferred_fk_update_collector.add_deferred_fk_to_update(
             field, "through_field_id", original_through_field_id
@@ -3956,6 +3969,7 @@ class RollupFieldType(FormulaFieldType):
         self,
         table: "Table",
         serialized_values: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         id_mapping: Dict[str, Any],
         deferred_fk_update_collector: DeferredFieldFkUpdater,
     ) -> "Field":
@@ -3966,7 +3980,11 @@ class RollupFieldType(FormulaFieldType):
         original_through_field_id = serialized_copy.pop("through_field_id")
         original_target_field_id = serialized_copy.pop("target_field_id")
         field = super().import_serialized(
-            table, serialized_copy, id_mapping, deferred_fk_update_collector
+            table,
+            serialized_copy,
+            import_export_config,
+            id_mapping,
+            deferred_fk_update_collector,
         )
         deferred_fk_update_collector.add_deferred_fk_to_update(
             field, "through_field_id", original_through_field_id
@@ -4192,6 +4210,7 @@ class LookupFieldType(FormulaFieldType):
         self,
         table: "Table",
         serialized_values: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         id_mapping: Dict[str, Any],
         deferred_fk_update_collector: DeferredFieldFkUpdater,
     ) -> "Field":
@@ -4202,7 +4221,11 @@ class LookupFieldType(FormulaFieldType):
         original_through_field_id = serialized_copy.pop("through_field_id")
         original_target_field_id = serialized_copy.pop("target_field_id")
         field = super().import_serialized(
-            table, serialized_copy, id_mapping, deferred_fk_update_collector
+            table,
+            serialized_copy,
+            import_export_config,
+            id_mapping,
+            deferred_fk_update_collector,
         )
         deferred_fk_update_collector.add_deferred_fk_to_update(
             field, "through_field_id", original_through_field_id
diff --git a/backend/src/baserow/contrib/database/fields/registries.py b/backend/src/baserow/contrib/database/fields/registries.py
index c93380648..81df7eea4 100644
--- a/backend/src/baserow/contrib/database/fields/registries.py
+++ b/backend/src/baserow/contrib/database/fields/registries.py
@@ -18,6 +18,7 @@ from django.db.models.functions import Cast
 
 from baserow.contrib.database.fields.constants import UPSERT_OPTION_DICT_KEY
 from baserow.contrib.database.fields.field_sortings import OptionallyAnnotatedOrderBy
+from baserow.core.registries import ImportExportConfig
 from baserow.core.registry import (
     APIUrlsInstanceMixin,
     APIUrlsRegistryMixin,
@@ -786,6 +787,7 @@ class FieldType(
         self,
         table: "Table",
         serialized_values: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         id_mapping: Dict[str, Any],
         deferred_fk_update_collector: DeferredFieldFkUpdater,
     ) -> Field:
@@ -798,6 +800,8 @@ class FieldType(
             be imported.
         :param id_mapping: The map of exported ids to newly created ids that must be
             updated when a new instance has been created.
+        :param import_export_config: provides configuration options for the
+            import/export process to customize how it works.
         :param deferred_fk_update_collector: An object than can be used to defer
             setting FK's to other fields until after all fields have been created
             and we know their IDs.
@@ -818,9 +822,10 @@ class FieldType(
             if self.can_have_select_options
             else []
         )
+        should_create_tsvector_column = not import_export_config.reduce_disk_space_usage
         field = self.model_class(
             table=table,
-            tsvector_column_created=table.tsvectors_are_supported,
+            tsvector_column_created=should_create_tsvector_column,
             **serialized_copy,
         )
         field.save()
diff --git a/backend/src/baserow/contrib/database/search/handler.py b/backend/src/baserow/contrib/database/search/handler.py
index cb1f3877f..fcc3782a0 100644
--- a/backend/src/baserow/contrib/database/search/handler.py
+++ b/backend/src/baserow/contrib/database/search/handler.py
@@ -338,7 +338,7 @@ class SearchHandler(
             and field_ids_to_restrict_update_to is not None
         ):
             raise ValueError(
-                "Mst always update all fields when updating rows "
+                "Must always update all fields when updating rows "
                 "with needs_background_update=True."
             )
 
@@ -369,6 +369,7 @@ class SearchHandler(
         was_full_column_update = not update_tsvectors_for_changed_rows_only
         if (
             was_full_column_update
+            and collected_vectors
             and settings.AUTO_VACUUM_AFTER_SEARCH_UPDATE
             and not settings.TESTS
         ):
diff --git a/backend/src/baserow/contrib/database/table/handler.py b/backend/src/baserow/contrib/database/table/handler.py
index a3732cf21..fa33626e6 100644
--- a/backend/src/baserow/contrib/database/table/handler.py
+++ b/backend/src/baserow/contrib/database/table/handler.py
@@ -32,7 +32,7 @@ from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.views.handler import ViewHandler
 from baserow.contrib.database.views.view_types import GridViewType
 from baserow.core.handler import CoreHandler
-from baserow.core.registries import application_type_registry
+from baserow.core.registries import ImportExportConfig, application_type_registry
 from baserow.core.telemetry.utils import baserow_trace_methods
 from baserow.core.trash.handler import TrashHandler
 from baserow.core.utils import ChildProgressBuilder, Progress, find_unused_name
@@ -533,7 +533,11 @@ class TableHandler(metaclass=baserow_trace_methods(tracer)):
 
         database_type = application_type_registry.get_by_model(database)
 
-        serialized_tables = database_type.export_tables_serialized([table])
+        config = ImportExportConfig(
+            include_permission_data=True, reduce_disk_space_usage=False
+        )
+
+        serialized_tables = database_type.export_tables_serialized([table], config)
 
         # Set a unique name for the table to import back as a new one.
         exported_table = serialized_tables[0]
@@ -570,6 +574,7 @@ class TableHandler(metaclass=baserow_trace_methods(tracer)):
             database,
             [exported_table],
             id_mapping,
+            config,
             external_table_fields_to_import=link_fields_to_import_to_existing_tables,
             progress_builder=progress.create_child_builder(
                 represents_progress=import_progress
diff --git a/backend/src/baserow/core/handler.py b/backend/src/baserow/core/handler.py
index 1d9d23b66..294cbb77c 100755
--- a/backend/src/baserow/core/handler.py
+++ b/backend/src/baserow/core/handler.py
@@ -72,7 +72,7 @@ from .operations import (
     UpdateWorkspaceUserOperationType,
 )
 from .registries import (
-    BaserowImportExportMode,
+    ImportExportConfig,
     application_type_registry,
     object_scope_type_registry,
     operation_type_registry,
@@ -1355,11 +1355,16 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         progress = ChildProgressBuilder.build(progress_builder, child_total=100)
         progress.increment(by=start_progress)
 
+        duplicate_import_export_config = ImportExportConfig(
+            include_permission_data=True, reduce_disk_space_usage=False
+        )
         # export the application
         specific_application = application.specific
         application_type = application_type_registry.get_by_model(specific_application)
         try:
-            serialized = application_type.export_serialized(specific_application)
+            serialized = application_type.export_serialized(
+                specific_application, duplicate_import_export_config
+            )
         except OperationalError as e:
             # Detect if this `OperationalError` is due to us exceeding the
             # lock count in `max_locks_per_transaction`. If it is, we'll
@@ -1382,6 +1387,7 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         new_application_clone = application_type.import_serialized(
             workspace,
             serialized,
+            duplicate_import_export_config,
             id_mapping,
             progress_builder=progress.create_child_builder(
                 represents_progress=import_progress
@@ -1478,10 +1484,8 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         self,
         workspace,
         files_buffer,
+        import_export_config: ImportExportConfig,
         storage=None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_DIFF_WORKSPACE_NEW_PK,
     ):
         """
         Exports the applications of a workspace to a list. They can later be imported
@@ -1498,9 +1502,8 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         :type files_buffer: IOBase
         :param storage: The storage where the files can be loaded from.
         :type storage: Storage or None
-        :param baserow_import_export_mode: defines which Baserow import/export mode to
-            use, defaults to `TARGETING_DIFF_WORKSPACE_NEW_PK`.
-        :type baserow_import_export_mode: enum
+        :param import_export_config: provides configuration options for the
+            import/export process to customize how it works.
         :return: A list containing the exported applications.
         :rtype: list
         """
@@ -1516,7 +1519,7 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
                 application_type = application_type_registry.get_by_model(application)
                 with application_type.export_safe_transaction_context(application):
                     exported_application = application_type.export_serialized(
-                        application, files_zip, storage, baserow_import_export_mode
+                        application, import_export_config, files_zip, storage
                     )
                 exported_applications.append(exported_application)
 
@@ -1527,11 +1530,9 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         workspace: Workspace,
         exported_applications: List[Dict[str, Any]],
         files_buffer: IO[bytes],
+        import_export_config: ImportExportConfig,
         storage: Optional[Storage] = None,
         progress_builder: Optional[ChildProgressBuilder] = None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_DIFF_WORKSPACE_NEW_PK,
     ) -> Tuple[List[Application], Dict[str, Any]]:
         """
         Imports multiple exported applications into the given workspace. It is
@@ -1547,9 +1548,8 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         :param storage: The storage where the files can be copied to.
         :param progress_builder: If provided will be used to build a child progress bar
             and report on this methods progress to the parent of the progress_builder.
-        :param baserow_import_export_mode: defines which Baserow import/export mode to
-            use, defaults to `TARGETING_DIFF_WORKSPACE_NEW_PK`.
-        :type baserow_import_export_mode: enum
+        :param import_export_config: provides configuration options for the
+            import/export process to customize how it works.
         :return: The newly created applications based on the import and a dict
             containing a mapping of old ids to new ids.
         """
@@ -1570,13 +1570,13 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
                 imported_application = application_type.import_serialized(
                     workspace,
                     application,
+                    import_export_config,
                     id_mapping,
                     files_zip,
                     storage,
                     progress_builder=progress.create_child_builder(
                         represents_progress=1000
                     ),
-                    baserow_import_export_mode=baserow_import_export_mode,
                 )
                 imported_application.order = next_application_order_value
                 next_application_order_value += 1
@@ -1644,6 +1644,13 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         )
         installed_categories = list(TemplateCategory.objects.all())
 
+        sync_templates_import_export_config = ImportExportConfig(
+            include_permission_data=False,
+            # Without reducing disk space usage Baserow after first time install
+            # takes up over 1GB of disk space.
+            reduce_disk_space_usage=True,
+        )
+
         # Loop over the JSON template files in the directory to see which database
         # templates need to be created or updated.
         templates = list(
@@ -1701,6 +1708,7 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
                     workspace,
                     parsed_json["export"],
                     files_buffer=files_buffer,
+                    import_export_config=sync_templates_import_export_config,
                     storage=storage,
                 )
 
@@ -1820,6 +1828,9 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
             workspace,
             parsed_json["export"],
             files_buffer=files_buffer,
+            import_export_config=ImportExportConfig(
+                include_permission_data=False, reduce_disk_space_usage=False
+            ),
             storage=storage,
             progress_builder=progress_builder,
         )
diff --git a/backend/src/baserow/core/management/commands/export_workspace_applications.py b/backend/src/baserow/core/management/commands/export_workspace_applications.py
index e04c23cf1..d7909dc1b 100644
--- a/backend/src/baserow/core/management/commands/export_workspace_applications.py
+++ b/backend/src/baserow/core/management/commands/export_workspace_applications.py
@@ -6,6 +6,11 @@ from django.core.management.base import BaseCommand
 
 from baserow.core.handler import CoreHandler
 from baserow.core.models import Workspace
+from baserow.core.registries import ImportExportConfig
+
+cli_import_export_config = ImportExportConfig(
+    include_permission_data=False, reduce_disk_space_usage=False
+)
 
 
 class Command(BaseCommand):
@@ -62,6 +67,7 @@ class Command(BaseCommand):
             exported_applications = CoreHandler().export_workspace_applications(
                 workspace,
                 files_buffer=files_buffer,
+                import_export_config=cli_import_export_config,
             )
 
         with open(export_path, "w") as export_buffer:
diff --git a/backend/src/baserow/core/management/commands/import_workspace_applications.py b/backend/src/baserow/core/management/commands/import_workspace_applications.py
index 1cd78d9c6..f838e2572 100644
--- a/backend/src/baserow/core/management/commands/import_workspace_applications.py
+++ b/backend/src/baserow/core/management/commands/import_workspace_applications.py
@@ -6,6 +6,9 @@ from django.core.management.base import BaseCommand
 from django.db import transaction
 
 from baserow.core.handler import CoreHandler
+from baserow.core.management.commands.export_workspace_applications import (
+    cli_import_export_config,
+)
 from baserow.core.models import Workspace
 
 
@@ -66,9 +69,7 @@ class Command(BaseCommand):
             # By default, we won't import any registry data. This is because
             # `RoleAssignment` can't be imported if the subjects are teams.
             applications, _ = handler.import_applications_to_workspace(
-                workspace,
-                content,
-                files_buffer,
+                workspace, content, files_buffer, cli_import_export_config
             )
 
             if files_buffer:
diff --git a/backend/src/baserow/core/registries.py b/backend/src/baserow/core/registries.py
index 96ef0e2b2..7d7895542 100755
--- a/backend/src/baserow/core/registries.py
+++ b/backend/src/baserow/core/registries.py
@@ -1,6 +1,6 @@
 import abc
+import dataclasses
 from collections import defaultdict
-from enum import Enum
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, TypeVar, Union
 from xmlrpc.client import Boolean
@@ -33,9 +33,7 @@ from .export_serialized import CoreExportSerializedStructure
 from .registry import (
     APIUrlsInstanceMixin,
     APIUrlsRegistryMixin,
-    ImportExportMixin,
     Instance,
-    InstanceSubClass,
     MapAPIExceptionsInstanceMixin,
     ModelInstanceMixin,
     ModelRegistryMixin,
@@ -61,19 +59,29 @@ if TYPE_CHECKING:
     )
 
 
-class BaserowImportExportMode(Enum):
-    """
-    Responsible for informing our import and export functionality
-    what kind of mode to support:
+@dataclasses.dataclass
+class ImportExportConfig:
 
-    targetingSameWorkspaceWithNewIds: we are exporting and importing into
-        the same workspace, with new primary keys (not preserving IDs).
-    targetingDifferentWorkspaceWithNewIds: we are exporting and importing
-        into a new workspace, with new primary keys (not preserving IDs).
+    """
+    Configuration options that alter how a Baserow import/export is performed.
+
+    include_permission_data: when True the export/import will also transfer
+    permission data. For example JSON exports set this to False to exclude
+    RBAC roles, as the role subjects would also need to be exported.
     """
 
-    TARGETING_SAME_WORKSPACE_NEW_PK = "targetingSameWorkspaceWithNewIds"
-    TARGETING_DIFF_WORKSPACE_NEW_PK = "targetingDifferentWorkspaceWithNewIds"
+    include_permission_data: bool
+
+    # Whether or not the import/export should attempt to save disk space by
+    # excluding certain pieces of optional data or processes that could
+    # instead be done later or not used at all.
+    #
+    # For example, when True this configures the database to not
+    # create/populate tsvector full text search columns as they can also
+    # be lazy loaded after the import when the user opens a view.
+    #
+    # Defaults to False so existing callers keep full-fidelity behaviour.
+    reduce_disk_space_usage: bool = False
 
 
 class Plugin(APIUrlsInstanceMixin, Instance):
@@ -199,7 +207,6 @@ class PluginRegistry(APIUrlsRegistryMixin, Registry):
 class ApplicationType(
     APIUrlsInstanceMixin,
     ModelInstanceMixin["Application"],
-    ImportExportMixin["Application"],
     Instance,
 ):
     """
@@ -302,7 +309,7 @@ class ApplicationType(
         workspace: "Workspace",
         scope,
         exported_structure: dict,
-        baserow_import_export_mode: BaserowImportExportMode,
+        import_export_config: ImportExportConfig,
     ) -> dict:
         """
         Given a serialized dictionary generated by `export_serialized`, this method
@@ -310,12 +317,12 @@ class ApplicationType(
         that needs to be added to the serialized structure.
         """
 
-        for serialized_structure in serialization_processor_registry.get_all_for_mode(
-            baserow_import_export_mode
-        ):
-            exported_structure.update(
-                **serialized_structure.export_serialized(workspace, scope)
+        for serialized_structure in serialization_processor_registry.get_all():
+            data = serialized_structure.export_serialized(
+                workspace, scope, import_export_config
             )
+            if data is not None:
+                exported_structure.update(**data)
         return exported_structure
 
     def import_serialized_structure_with_registry(
@@ -323,7 +330,7 @@ class ApplicationType(
         id_mapping: Dict[str, Any],
         scope,
         serialized_scope: dict,
-        baserow_import_export_mode: BaserowImportExportMode,
+        import_export_config: ImportExportConfig,
         workspace: Optional["Workspace"] = None,
     ) -> None:
         """
@@ -340,21 +347,17 @@ class ApplicationType(
                 pk=id_mapping["import_workspace_id"]
             )
 
-        for serialized_structure in serialization_processor_registry.get_all_for_mode(
-            baserow_import_export_mode
-        ):
+        for serialized_structure in serialization_processor_registry.get_all():
             serialized_structure.import_serialized(
-                source_workspace, scope, serialized_scope
+                source_workspace, scope, serialized_scope, import_export_config
             )
 
     def export_serialized(
         self,
         application: "Application",
+        import_export_config: ImportExportConfig,
         files_zip: Optional[ZipFile] = None,
         storage: Optional[Storage] = None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK,
     ):
         """
         Exports the application to a serialized dict that can be imported by the
@@ -367,9 +370,8 @@ class ApplicationType(
         :type files_zip: ZipFile
         :param storage: The storage where the files can be loaded from.
         :type storage: Storage or None
-        :param baserow_import_export_mode: defines which Baserow import/export mode to
-            use, defaults to `TARGETING_SAME_WORKSPACE_NEW_PK`.
-        :type baserow_import_export_mode: enum
+        :param import_export_config: provides configuration options for the
+            import/export process to customize how it works.
         :return: The exported and serialized application.
         :rtype: dict
         """
@@ -382,7 +384,7 @@ class ApplicationType(
         )
         # Annotate any `SerializationProcessorType` we have.
         structure = self.export_serialized_structure_with_registry(
-            application.get_root(), application, structure, baserow_import_export_mode
+            application.get_root(), application, structure, import_export_config
         )
         return structure
 
@@ -390,13 +392,11 @@ class ApplicationType(
         self,
         workspace: "Workspace",
         serialized_values: Dict[str, Any],
+        import_export_config: ImportExportConfig,
         id_mapping: Dict[str, Any],
         files_zip: Optional[ZipFile] = None,
         storage: Optional[Storage] = None,
         progress_builder: Optional[ChildProgressBuilder] = None,
-        baserow_import_export_mode: Optional[
-            BaserowImportExportMode
-        ] = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK,
     ) -> "Application":
         """
         Imports the exported serialized application by the `export_serialized` as a new
@@ -412,9 +412,8 @@ class ApplicationType(
         :param storage: The storage where the files can be copied to.
         :param progress_builder: If provided will be used to build a child progress bar
             and report on this methods progress to the parent of the progress_builder.
-        :param baserow_import_export_mode: defines which Baserow import/export mode to
-            use, defaults to `TARGETING_SAME_WORKSPACE_NEW_PK`.
-        :type baserow_import_export_mode: enum
+        :param import_export_config: provides configuration options for the
+            import/export process to customize how it works.
         :return: The newly created application.
         """
 
@@ -460,7 +459,7 @@ class ApplicationType(
             id_mapping,
             application,
             serialized_values,
-            baserow_import_export_mode,
+            import_export_config,
             workspace,
         )
 
@@ -1158,34 +1157,34 @@ class SerializationProcessorType(abc.ABC, Instance):
     `import_serialized` and `export_serialized` methods.
     """
 
-    import_export_mode = None
-
     @classmethod
     def import_serialized(
         cls,
         workspace: "Workspace",
         scope: SerializationProcessorScope,
         serialized_scope: dict,
-    ) -> dict:
+        import_export_config: ImportExportConfig,
+    ):
         """
         A hook which is called after an application subclass or table has been
         imported, allowing us to import additional data in `serialized_scope`.
         """
 
-        return {}
+        pass
 
     @classmethod
     def export_serialized(
         cls,
         workspace: "Workspace",
         scope: SerializationProcessorScope,
-    ) -> dict[str, Any]:
+        import_export_config: ImportExportConfig,
+    ) -> Optional[Dict[str, Any]]:
         """
         A hook which is called after an application subclass or table has been
         exported, allowing us to export additional data.
         """
 
-        return {}
+        return None
 
 
 class SerializationProcessorRegistry(Registry[SerializationProcessorType]):
@@ -1197,20 +1196,6 @@ class SerializationProcessorRegistry(Registry[SerializationProcessorType]):
 
     name = "serialization_processors"
 
-    def get_all_for_mode(
-        self, import_export_mode: BaserowImportExportMode
-    ) -> List[InstanceSubClass]:
-        """
-        Returns registrables from `get_all`, filtered down to only
-        those that have a matching import/export mode.
-        """
-
-        return [
-            registrable
-            for registrable in super().get_all()
-            if registrable.import_export_mode == import_export_mode
-        ]
-
 
 # A default plugin and application registry is created here, this is the one that is
 # used throughout the whole Baserow application. To add a new plugin or application use
diff --git a/backend/src/baserow/core/snapshots/handler.py b/backend/src/baserow/core/snapshots/handler.py
index 39664f154..f7b1a2d18 100755
--- a/backend/src/baserow/core/snapshots/handler.py
+++ b/backend/src/baserow/core/snapshots/handler.py
@@ -18,7 +18,7 @@ from baserow.core.handler import CoreHandler
 from baserow.core.jobs.handler import JobHandler
 from baserow.core.jobs.models import Job
 from baserow.core.models import Application, Snapshot, User, Workspace
-from baserow.core.registries import application_type_registry
+from baserow.core.registries import ImportExportConfig, application_type_registry
 from baserow.core.signals import application_created
 from baserow.core.snapshots.exceptions import (
     MaximumSnapshotsReached,
@@ -380,9 +380,12 @@ class SnapshotHandler:
         )
 
         application_type = application_type_registry.get_by_model(application)
+        snapshot_import_export_config = ImportExportConfig(
+            include_permission_data=True, reduce_disk_space_usage=True
+        )
         try:
             exported_application = application_type.export_serialized(
-                application, None, default_storage
+                application, snapshot_import_export_config, None, default_storage
             )
         except OperationalError as e:
             # Detect if this `OperationalError` is due to us exceeding the
@@ -402,6 +405,7 @@ class SnapshotHandler:
         application_type.import_serialized(
             None,
             exported_application,
+            snapshot_import_export_config,
             id_mapping,
             None,
             default_storage,
@@ -434,14 +438,19 @@ class SnapshotHandler:
 
         application = snapshot.snapshot_to_application.specific
         application_type = application_type_registry.get_by_model(application)
+
+        restore_snapshot_import_export_config = ImportExportConfig(
+            include_permission_data=True, reduce_disk_space_usage=False
+        )
         exported_application = application_type.export_serialized(
-            application, None, default_storage
+            application, restore_snapshot_import_export_config, None, default_storage
         )
         progress.increment(by=50)
 
         imported_application = application_type.import_serialized(
             snapshot.snapshot_from_application.workspace,
             exported_application,
+            restore_snapshot_import_export_config,
             {},
             None,
             default_storage,
diff --git a/backend/tests/baserow/contrib/builder/test_builder_application_type.py b/backend/tests/baserow/contrib/builder/test_builder_application_type.py
index 9cfe64602..50a838745 100644
--- a/backend/tests/baserow/contrib/builder/test_builder_application_type.py
+++ b/backend/tests/baserow/contrib/builder/test_builder_application_type.py
@@ -5,6 +5,7 @@ from baserow.contrib.builder.elements.models import HeadingElement, ParagraphEle
 from baserow.contrib.builder.models import Builder
 from baserow.contrib.builder.pages.models import Page
 from baserow.core.db import specific_iterator
+from baserow.core.registries import ImportExportConfig
 from baserow.core.trash.handler import TrashHandler
 
 
@@ -34,7 +35,9 @@ def test_builder_application_export(data_fixture):
     element2 = data_fixture.create_builder_paragraph_element(page=page1)
     element3 = data_fixture.create_builder_heading_element(page=page2)
 
-    serialized = BuilderApplicationType().export_serialized(builder)
+    serialized = BuilderApplicationType().export_serialized(
+        builder, ImportExportConfig(include_permission_data=True)
+    )
 
     assert serialized == {
         "pages": [
@@ -143,8 +146,9 @@ def test_builder_application_import(data_fixture):
     user = data_fixture.create_user()
     workspace = data_fixture.create_workspace(user=user)
 
+    config = ImportExportConfig(include_permission_data=True)
     builder = BuilderApplicationType().import_serialized(
-        workspace, IMPORT_REFERENCE, {}
+        workspace, IMPORT_REFERENCE, config, {}
     )
 
     assert builder.id != IMPORT_REFERENCE["id"]
diff --git a/backend/tests/baserow/contrib/database/field/test_count_field_type.py b/backend/tests/baserow/contrib/database/field/test_count_field_type.py
index c9093d116..99431f103 100644
--- a/backend/tests/baserow/contrib/database/field/test_count_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_count_field_type.py
@@ -10,6 +10,7 @@ from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.contrib.database.formula import BaserowFormulaNumberType
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -367,11 +368,16 @@ def test_import_export_tables_with_count_fields(
         through_field_id=link_row_field.id,
     )
 
+    config = ImportExportConfig(include_permission_data=False)
     exported_applications = core_handler.export_workspace_applications(
-        database.workspace, BytesIO()
+        database.workspace, BytesIO(), config
     )
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace,
+        exported_applications,
+        BytesIO(),
+        config,
+        None,
     )
     imported_database = imported_applications[0]
     imported_tables = imported_database.table_set.all()
diff --git a/backend/tests/baserow/contrib/database/field/test_created_on_field_type.py b/backend/tests/baserow/contrib/database/field/test_created_on_field_type.py
index 180dfd126..62ae19b23 100644
--- a/backend/tests/baserow/contrib/database/field/test_created_on_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_created_on_field_type.py
@@ -11,6 +11,7 @@ from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.contrib.database.fields.models import CreatedOnField
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -182,8 +183,9 @@ def test_import_export_last_modified_field(data_fixture):
     )
 
     core_handler = CoreHandler()
+    config = ImportExportConfig(include_permission_data=False)
     exported_applications = core_handler.export_workspace_applications(
-        database.workspace, BytesIO()
+        database.workspace, BytesIO(), config
     )
 
     # We manually set this value in the export, because if it's set, then the import
@@ -197,7 +199,7 @@ def test_import_export_last_modified_field(data_fixture):
             imported_applications,
             id_mapping,
         ) = core_handler.import_applications_to_workspace(
-            imported_workspace, exported_applications, BytesIO(), None
+            imported_workspace, exported_applications, BytesIO(), config, None
         )
 
     imported_database = imported_applications[0]
diff --git a/backend/tests/baserow/contrib/database/field/test_date_field_type.py b/backend/tests/baserow/contrib/database/field/test_date_field_type.py
index 26f5a7fd9..84b034179 100644
--- a/backend/tests/baserow/contrib/database/field/test_date_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_date_field_type.py
@@ -14,6 +14,7 @@ from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.contrib.database.fields.models import DateField
 from baserow.contrib.database.fields.registries import field_type_registry
 from baserow.contrib.database.rows.handler import RowHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -542,7 +543,11 @@ def test_import_export_date_field(data_fixture):
     date_field_type = field_type_registry.get_by_model(date_field)
     number_serialized = date_field_type.export_serialized(date_field)
     number_field_imported = date_field_type.import_serialized(
-        date_field.table, number_serialized, {}, DeferredFieldFkUpdater()
+        date_field.table,
+        number_serialized,
+        ImportExportConfig(include_permission_data=True),
+        {},
+        DeferredFieldFkUpdater(),
     )
     assert date_field.date_format == number_field_imported.date_format
     assert date_field.date_include_time == number_field_imported.date_include_time
diff --git a/backend/tests/baserow/contrib/database/field/test_field_types.py b/backend/tests/baserow/contrib/database/field/test_field_types.py
index c94b69cee..87fa747e7 100644
--- a/backend/tests/baserow/contrib/database/field/test_field_types.py
+++ b/backend/tests/baserow/contrib/database/field/test_field_types.py
@@ -17,6 +17,7 @@ from baserow.contrib.database.fields.models import (
 )
 from baserow.contrib.database.fields.registries import FieldType, field_type_registry
 from baserow.contrib.database.rows.handler import RowHandler
+from baserow.core.registries import ImportExportConfig
 from baserow.test_utils.helpers import setup_interesting_test_table
 
 
@@ -30,7 +31,11 @@ def test_import_export_text_field(data_fixture):
     text_field_type = field_type_registry.get_by_model(text_field)
     text_serialized = text_field_type.export_serialized(text_field)
     text_field_imported = text_field_type.import_serialized(
-        text_field.table, text_serialized, id_mapping, DeferredFieldFkUpdater()
+        text_field.table,
+        text_serialized,
+        ImportExportConfig(include_permission_data=True),
+        id_mapping,
+        DeferredFieldFkUpdater(),
     )
     assert text_field.id != text_field_imported.id
     assert text_field.name == text_field_imported.name
@@ -66,6 +71,7 @@ def test_import_export_formula_field(data_fixture, api_client):
     formula_field_imported = formula_field_type.import_serialized(
         text_field_in_diff_table.table,
         formula_serialized,
+        ImportExportConfig(include_permission_data=True),
         id_mapping,
         DeferredFieldFkUpdater(),
     )
@@ -670,7 +676,11 @@ def test_import_export_lookup_field(data_fixture, api_client):
 
     deferred_field_fk_updater = DeferredFieldFkUpdater()
     lookup_field_imported = lookup_field_type.import_serialized(
-        table_a, lookup_serialized, id_mapping, deferred_field_fk_updater
+        table_a,
+        lookup_serialized,
+        ImportExportConfig(include_permission_data=True),
+        id_mapping,
+        deferred_field_fk_updater,
     )
     assert lookup.id != lookup_field_imported.id
     assert lookup_field_imported.name == "lookup"
diff --git a/backend/tests/baserow/contrib/database/field/test_file_field_type.py b/backend/tests/baserow/contrib/database/field/test_file_field_type.py
index 2986f36bc..71d165ca5 100644
--- a/backend/tests/baserow/contrib/database/field/test_file_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_file_field_type.py
@@ -12,6 +12,7 @@ from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.contrib.database.fields.models import FileField
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 from baserow.core.user_files.exceptions import (
     InvalidUserFileNameError,
     UserFileDoesNotExist,
@@ -225,8 +226,13 @@ def test_import_export_file_field(data_fixture, tmpdir):
     )
 
     files_buffer = BytesIO()
+    config = ImportExportConfig(include_permission_data=False)
+
     exported_applications = core_handler.export_workspace_applications(
-        database.workspace, files_buffer=files_buffer, storage=storage
+        database.workspace,
+        files_buffer=files_buffer,
+        storage=storage,
+        import_export_config=config,
     )
 
     # We expect that the exported zip file contains the user file used in the created
@@ -259,7 +265,7 @@ def test_import_export_file_field(data_fixture, tmpdir):
     ] = "test2.txt"
 
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, files_buffer, storage
+        imported_workspace, exported_applications, files_buffer, config, storage
     )
     imported_database = imported_applications[0]
     imported_tables = imported_database.table_set.all()
diff --git a/backend/tests/baserow/contrib/database/field/test_last_modified_field_type.py b/backend/tests/baserow/contrib/database/field/test_last_modified_field_type.py
index 8912caf35..ce2b7c126 100644
--- a/backend/tests/baserow/contrib/database/field/test_last_modified_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_last_modified_field_type.py
@@ -11,6 +11,7 @@ from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.contrib.database.fields.models import LastModifiedField
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -186,8 +187,10 @@ def test_import_export_last_modified_field(data_fixture):
     )
 
     core_handler = CoreHandler()
+    config = ImportExportConfig(include_permission_data=False)
+
     exported_applications = core_handler.export_workspace_applications(
-        database.workspace, BytesIO()
+        database.workspace, BytesIO(), config
     )
 
     # We manually set this value in the export, because if it's set, then the import
@@ -201,7 +204,7 @@ def test_import_export_last_modified_field(data_fixture):
             imported_applications,
             id_mapping,
         ) = core_handler.import_applications_to_workspace(
-            imported_workspace, exported_applications, BytesIO(), None
+            imported_workspace, exported_applications, BytesIO(), config, None
         )
 
     imported_database = imported_applications[0]
diff --git a/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py b/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
index 35af7b203..0466cfa0f 100644
--- a/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_link_row_field_type.py
@@ -23,6 +23,7 @@ from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.table.handler import TableHandler
 from baserow.core.handler import CoreHandler
 from baserow.core.models import TrashEntry
+from baserow.core.registries import ImportExportConfig
 from baserow.core.trash.handler import TrashHandler
 
 
@@ -881,11 +882,13 @@ def test_import_export_link_row_field(data_fixture):
         values={f"field_{link_row_field.id}": [c_row.id, c_row_2.id]},
     )
 
+    config = ImportExportConfig(include_permission_data=False)
+
     exported_applications = core_handler.export_workspace_applications(
-        database.workspace, BytesIO()
+        database.workspace, BytesIO(), config
     )
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace, exported_applications, BytesIO(), config, None
     )
     imported_database = imported_applications[0]
     imported_tables = imported_database.table_set.all()
diff --git a/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py b/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py
index f16824395..d9b8c08df 100644
--- a/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py
@@ -23,6 +23,7 @@ from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.views.handler import ViewHandler
 from baserow.core.db import specific_iterator
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -457,7 +458,11 @@ def test_import_export_lookup_field_when_through_field_trashed(
     lookup.save()
 
     lookup_field_imported = lookup_field_type.import_serialized(
-        table_a, lookup_serialized, id_mapping, DeferredFieldFkUpdater()
+        table_a,
+        lookup_serialized,
+        ImportExportConfig(include_permission_data=True),
+        id_mapping,
+        DeferredFieldFkUpdater(),
     )
     assert lookup_field_imported.through_field is None
     assert lookup_field_imported.through_field_name == link_field.name
@@ -508,7 +513,11 @@ def test_import_export_lookup_field_trashed_target_field(data_fixture, api_clien
     lookup.save()
 
     lookup_field_imported = lookup_field_type.import_serialized(
-        table_a, lookup_serialized, id_mapping, DeferredFieldFkUpdater()
+        table_a,
+        lookup_serialized,
+        ImportExportConfig(include_permission_data=True),
+        id_mapping,
+        DeferredFieldFkUpdater(),
     )
     assert lookup_field_imported.through_field is None
     assert lookup_field_imported.through_field_name == link_field.name
@@ -578,11 +587,12 @@ def test_import_export_tables_with_lookup_fields(
         target_field_id=customer_age.id,
     )
 
+    config = ImportExportConfig(include_permission_data=False)
     exported_applications = core_handler.export_workspace_applications(
-        database.workspace, BytesIO()
+        database.workspace, BytesIO(), config
     )
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace, exported_applications, BytesIO(), config, None
     )
     imported_database = imported_applications[0]
     imported_tables = imported_database.table_set.all()
diff --git a/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py b/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py
index e5a7684e0..d01f28470 100644
--- a/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py
@@ -11,6 +11,7 @@ from baserow.contrib.database.fields.models import MultipleCollaboratorsField
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.views.handler import ViewHandler
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -131,11 +132,12 @@ def test_get_set_export_serialized_value_multiple_collaborators_field(data_fixtu
         },
     )
 
+    config = ImportExportConfig(include_permission_data=False)
     exported_applications = core_handler.export_workspace_applications(
-        workspace, BytesIO()
+        workspace, BytesIO(), config
     )
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace, exported_applications, BytesIO(), config, None
     )
     imported_database = imported_applications[0]
     imported_table = imported_database.table_set.all()[0]
diff --git a/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py b/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py
index 37e0fc89a..9ab139656 100644
--- a/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py
@@ -29,6 +29,7 @@ from baserow.contrib.database.fields.registries import field_type_registry
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.views.handler import ViewHandler
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -732,7 +733,11 @@ def test_import_export_multiple_select_field(data_fixture):
     field_serialized = field_type.export_serialized(field)
     id_mapping = {}
     field_imported = field_type.import_serialized(
-        table, field_serialized, id_mapping, DeferredFieldFkUpdater()
+        table,
+        field_serialized,
+        ImportExportConfig(include_permission_data=True),
+        id_mapping,
+        DeferredFieldFkUpdater(),
     )
 
     assert field_imported.select_options.all().count() == 4
@@ -798,11 +803,13 @@ def test_get_set_export_serialized_value_multiple_select_field(
     )
     assert len(SelectOption.objects.all()) == 3
 
+    config = ImportExportConfig(include_permission_data=False)
+
     exported_applications = core_handler.export_workspace_applications(
-        workspace, BytesIO()
+        workspace, BytesIO(), config
     )
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace, exported_applications, BytesIO(), config, None
     )
     imported_database = imported_applications[0]
     imported_table = imported_database.table_set.all()[0]
diff --git a/backend/tests/baserow/contrib/database/field/test_number_field_type.py b/backend/tests/baserow/contrib/database/field/test_number_field_type.py
index b340a635f..f69cdccb8 100644
--- a/backend/tests/baserow/contrib/database/field/test_number_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_number_field_type.py
@@ -11,6 +11,7 @@ from baserow.contrib.database.fields.handler import FieldHandler
 from baserow.contrib.database.fields.models import NumberField
 from baserow.contrib.database.fields.registries import field_type_registry
 from baserow.contrib.database.rows.handler import RowHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -198,7 +199,11 @@ def test_import_export_number_field(data_fixture):
     number_field_type = field_type_registry.get_by_model(number_field)
     number_serialized = number_field_type.export_serialized(number_field)
     number_field_imported = number_field_type.import_serialized(
-        number_field.table, number_serialized, {}, DeferredFieldFkUpdater()
+        number_field.table,
+        number_serialized,
+        ImportExportConfig(include_permission_data=True),
+        {},
+        DeferredFieldFkUpdater(),
     )
     assert number_field.number_negative == number_field_imported.number_negative
     assert number_field.number_decimal_places == (
diff --git a/backend/tests/baserow/contrib/database/field/test_rollup_field_type.py b/backend/tests/baserow/contrib/database/field/test_rollup_field_type.py
index ca8523e4a..267000103 100644
--- a/backend/tests/baserow/contrib/database/field/test_rollup_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_rollup_field_type.py
@@ -14,6 +14,7 @@ from baserow.contrib.database.formula import BaserowFormulaNumberType
 from baserow.contrib.database.formula.types.exceptions import InvalidFormulaType
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 from baserow.formula.exceptions import FormulaFunctionTypeDoesNotExist
 
 
@@ -584,11 +585,12 @@ def test_import_export_tables_with_rollup_fields(
         rollup_function="sum",
     )
 
+    config = ImportExportConfig(include_permission_data=False)
     exported_applications = core_handler.export_workspace_applications(
-        database.workspace, BytesIO()
+        database.workspace, BytesIO(), config
     )
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace, exported_applications, BytesIO(), config, None
     )
     imported_database = imported_applications[0]
     imported_tables = imported_database.table_set.all()
diff --git a/backend/tests/baserow/contrib/database/field/test_single_select_field_type.py b/backend/tests/baserow/contrib/database/field/test_single_select_field_type.py
index b2a4a1643..67af6609a 100644
--- a/backend/tests/baserow/contrib/database/field/test_single_select_field_type.py
+++ b/backend/tests/baserow/contrib/database/field/test_single_select_field_type.py
@@ -21,6 +21,7 @@ from baserow.contrib.database.fields.registries import field_type_registry
 from baserow.contrib.database.rows.handler import RowHandler
 from baserow.contrib.database.views.handler import ViewHandler
 from baserow.core.handler import CoreHandler
+from baserow.core.registries import ImportExportConfig
 
 
 @pytest.mark.django_db
@@ -950,7 +951,11 @@ def test_import_export_single_select_field(data_fixture):
     field_serialized = field_type.export_serialized(field)
     id_mapping = {}
     field_imported = field_type.import_serialized(
-        table, field_serialized, id_mapping, DeferredFieldFkUpdater()
+        table,
+        field_serialized,
+        ImportExportConfig(include_permission_data=True),
+        id_mapping,
+        DeferredFieldFkUpdater(),
     )
 
     assert field_imported.select_options.all().count() == 1
@@ -979,11 +984,12 @@ def test_get_set_export_serialized_value_single_select_field(data_fixture):
     model.objects.create(**{f"field_{field.id}_id": option_a.id})
     model.objects.create(**{f"field_{field.id}_id": option_b.id})
 
+    config = ImportExportConfig(include_permission_data=False)
     exported_applications = core_handler.export_workspace_applications(
-        workspace, BytesIO()
+        workspace, BytesIO(), config
     )
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace, exported_applications, BytesIO(), config, None
     )
     imported_database = imported_applications[0]
     imported_table = imported_database.table_set.all()[0]
@@ -1028,11 +1034,13 @@ def test_get_set_export_serialized_value_single_select_field_with_deleted_option
     # Deleting the option doesn't set the row value to None.
     option_a.delete()
 
+    config = ImportExportConfig(include_permission_data=False)
+
     exported_applications = core_handler.export_workspace_applications(
-        workspace, BytesIO()
+        workspace, BytesIO(), config
     )
     imported_applications, id_mapping = core_handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace, exported_applications, BytesIO(), config, None
     )
     imported_database = imported_applications[0]
     imported_table = imported_database.table_set.all()[0]
diff --git a/backend/tests/baserow/contrib/database/test_database_application_type.py b/backend/tests/baserow/contrib/database/test_database_application_type.py
index d29a8a303..92cda3647 100644
--- a/backend/tests/baserow/contrib/database/test_database_application_type.py
+++ b/backend/tests/baserow/contrib/database/test_database_application_type.py
@@ -7,7 +7,7 @@ from pytz import UTC
 from baserow.contrib.database.fields.models import FormulaField, TextField
 from baserow.contrib.database.table.models import Table
 from baserow.core.handler import CoreHandler
-from baserow.core.registries import application_type_registry
+from baserow.core.registries import ImportExportConfig, application_type_registry
 
 
 @pytest.mark.django_db
@@ -35,7 +35,8 @@ def test_import_export_database(data_fixture):
     row.refresh_from_db()
 
     database_type = application_type_registry.get("database")
-    serialized = database_type.export_serialized(database, None, None)
+    config = ImportExportConfig(include_permission_data=True)
+    serialized = database_type.export_serialized(database, config)
 
     # Delete the updated on, because the import should also be compatible with
     # without these values present.
@@ -47,7 +48,12 @@ def test_import_export_database(data_fixture):
 
     with freeze_time("2022-01-01 12:00"):
         imported_database = database_type.import_serialized(
-            imported_workspace, serialized, id_mapping, None, None
+            imported_workspace,
+            serialized,
+            config,
+            id_mapping,
+            None,
+            None,
         )
 
     assert imported_database.id != database.id
diff --git a/backend/tests/baserow/core/test_core_handler.py b/backend/tests/baserow/core/test_core_handler.py
index 49c06a386..7b4fdab35 100755
--- a/backend/tests/baserow/core/test_core_handler.py
+++ b/backend/tests/baserow/core/test_core_handler.py
@@ -43,6 +43,7 @@ from baserow.core.models import (
     WorkspaceUser,
 )
 from baserow.core.operations import ReadWorkspaceOperationType
+from baserow.core.registries import ImportExportConfig
 from baserow.core.trash.handler import TrashHandler
 from baserow.core.user_files.models import UserFile
 
@@ -1141,9 +1142,12 @@ def test_export_import_workspace_application(data_fixture):
     data_fixture.create_database_table(database=database)
 
     handler = CoreHandler()
-    exported_applications = handler.export_workspace_applications(workspace, BytesIO())
+    config = ImportExportConfig(include_permission_data=False)
+    exported_applications = handler.export_workspace_applications(
+        workspace, BytesIO(), config
+    )
     imported_applications, id_mapping = handler.import_applications_to_workspace(
-        imported_workspace, exported_applications, BytesIO(), None
+        imported_workspace, exported_applications, BytesIO(), config, None
     )
 
     assert len(imported_applications) == 1
diff --git a/backend/tests/baserow/core/test_core_registry.py b/backend/tests/baserow/core/test_core_registry.py
index 95ead8177..2314deb82 100644
--- a/backend/tests/baserow/core/test_core_registry.py
+++ b/backend/tests/baserow/core/test_core_registry.py
@@ -9,11 +9,6 @@ from baserow.core.exceptions import (
     InstanceTypeAlreadyRegistered,
     InstanceTypeDoesNotExist,
 )
-from baserow.core.registries import (
-    BaserowImportExportMode,
-    SerializationProcessorRegistry,
-    SerializationProcessorType,
-)
 from baserow.core.registry import (
     CustomFieldsInstanceMixin,
     CustomFieldsRegistryMixin,
@@ -204,34 +199,3 @@ def test_get_serializer(data_fixture):
 
     serializer = registry.get_serializer(database, request=True)
     assert "order" in serializer.data
-
-
-def test_serialization_processor_registry_get_all_for_mode_matching_mode():
-    class MatchingProcessorType(SerializationProcessorType):
-        type = "matching_processor_type"
-        import_export_mode = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK
-
-    processor_type = MatchingProcessorType()
-    registry = SerializationProcessorRegistry()
-    registry.register(processor_type)
-
-    assert registry.get_all_for_mode(
-        BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK
-    ) == [processor_type]
-
-
-def test_serialization_processor_registry_get_all_for_mode_mismatching_mode():
-    class MatchingProcessorType(SerializationProcessorType):
-        type = "mismatching_processor_type"
-        import_export_mode = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK
-
-    processor_type = MatchingProcessorType()
-    registry = SerializationProcessorRegistry()
-    registry.register(processor_type)
-
-    assert (
-        registry.get_all_for_mode(
-            BaserowImportExportMode.TARGETING_DIFF_WORKSPACE_NEW_PK
-        )
-        == []
-    )
diff --git a/enterprise/backend/src/baserow_enterprise/structure_types.py b/enterprise/backend/src/baserow_enterprise/structure_types.py
index f1819d782..ab4fd6897 100644
--- a/enterprise/backend/src/baserow_enterprise/structure_types.py
+++ b/enterprise/backend/src/baserow_enterprise/structure_types.py
@@ -5,7 +5,7 @@ from django.contrib.contenttypes.models import ContentType
 from baserow_premium.license.handler import LicenseHandler
 
 from baserow.core.models import Application
-from baserow.core.registries import BaserowImportExportMode, SerializationProcessorType
+from baserow.core.registries import ImportExportConfig, SerializationProcessorType
 from baserow.core.types import SerializationProcessorScope
 from baserow.core.utils import atomic_if_not_already
 from baserow_enterprise.features import RBAC
@@ -30,7 +30,6 @@ class EnterpriseExportSerializedStructure:
 class RoleAssignmentSerializationProcessorType(SerializationProcessorType):
     type = "role_assignment_serialization_processors"
     structure = EnterpriseExportSerializedStructure
-    import_export_mode = BaserowImportExportMode.TARGETING_SAME_WORKSPACE_NEW_PK
 
     @classmethod
     def import_serialized(
@@ -38,12 +37,19 @@ class RoleAssignmentSerializationProcessorType(SerializationProcessorType):
         workspace: "Workspace",
         scope: SerializationProcessorScope,
         serialized_scope: dict,
-    ) -> None:
+        import_export_config: ImportExportConfig,
+    ):
         """
         Responsible for importing any `role_assignments` in `serialized_scope`
         into a newly restored/duplicated scope in `scope`.
         """
 
+        if not import_export_config.include_permission_data:
+            # We cannot yet export RBAC roles to another workspace, as we would
+            # also need to export all subjects to the new workspace, or somehow
+            # allow the user to choose how to map subjects.
+            return
+
         # Application subclass scopes can't be passed to
         # the role assignment handler. See #1624.
         if isinstance(scope, Application):
@@ -71,12 +77,19 @@ class RoleAssignmentSerializationProcessorType(SerializationProcessorType):
         cls,
         workspace: "Workspace",
         scope: SerializationProcessorScope,
+        import_export_config: ImportExportConfig,
     ) -> dict[str, Any]:
         """
         Exports the `role_assignments` in `scope` when it is being exported
         by an application type `export_serialized`.
         """
 
+        if not import_export_config.include_permission_data:
+            # We cannot yet export RBAC roles to another workspace, as we would
+            # also need to export all subjects to the new workspace, or somehow
+            # allow the user to choose how to map subjects.
+            return
+
         # Do not export anything if the workspace doesn't have RBAC enabled.
         if not LicenseHandler.workspace_has_feature(RBAC, workspace):
             return {}
diff --git a/enterprise/backend/tests/baserow_enterprise_tests/test_registries.py b/enterprise/backend/tests/baserow_enterprise_tests/test_registries.py
index 75eef4db2..13658b95a 100644
--- a/enterprise/backend/tests/baserow_enterprise_tests/test_registries.py
+++ b/enterprise/backend/tests/baserow_enterprise_tests/test_registries.py
@@ -3,6 +3,7 @@ from django.contrib.contenttypes.models import ContentType
 import pytest
 
 from baserow.contrib.database.table.handler import TableHandler
+from baserow.core.registries import ImportExportConfig
 from baserow_enterprise.role.handler import RoleAssignmentHandler
 from baserow_enterprise.role.models import Role
 from baserow_enterprise.structure_types import RoleAssignmentSerializationProcessorType
@@ -21,10 +22,12 @@ def test_export_serialized_structure_on_database(enterprise_data_fixture):
     database = enterprise_data_fixture.create_database_application(workspace=workspace)
     application = database.application_ptr
 
+    config = ImportExportConfig(include_permission_data=True)
+
     role = Role.objects.get(uid="ADMIN")
     RoleAssignmentHandler().assign_role(user, workspace, role, application)
     serialized_structure = enterprise_structure.export_serialized(
-        workspace, application
+        workspace, application, config
     )
 
     content_types = ContentType.objects.get_for_models(user, application)
@@ -49,8 +52,9 @@ def test_import_serialized_structure_on_database(enterprise_data_fixture):
 
     role = Role.objects.get(uid="ADMIN")
     RoleAssignmentHandler().assign_role(user, workspace, role, application)
+    config = ImportExportConfig(include_permission_data=True)
     serialized_structure = enterprise_structure.export_serialized(
-        workspace, application
+        workspace, application, config
     )
 
     new_database = enterprise_data_fixture.create_database_application(
@@ -59,7 +63,7 @@ def test_import_serialized_structure_on_database(enterprise_data_fixture):
     new_application = new_database.application_ptr
 
     enterprise_structure.import_serialized(
-        workspace, new_application, serialized_structure
+        workspace, new_application, serialized_structure, config
     )
 
     role_assignments = RoleAssignmentHandler().get_role_assignments(
@@ -82,11 +86,15 @@ def test_export_serialized_structure_on_table(enterprise_data_fixture):
     workspace = enterprise_data_fixture.create_workspace(user=user)
     database = enterprise_data_fixture.create_database_application(workspace=workspace)
 
+    config = ImportExportConfig(include_permission_data=True)
+
     role = Role.objects.get(uid="ADMIN")
     table, _ = TableHandler().create_table(user, database, name="Table")
     RoleAssignmentHandler().assign_role(user, workspace, role, table)
 
-    serialized_structure = enterprise_structure.export_serialized(workspace, table)
+    serialized_structure = enterprise_structure.export_serialized(
+        workspace, table, config
+    )
 
     content_types = ContentType.objects.get_for_models(user, table)
     assert serialized_structure == {
@@ -107,13 +115,19 @@ def test_import_serialized_structure_on_table(enterprise_data_fixture):
     workspace = enterprise_data_fixture.create_workspace(user=user)
     database = enterprise_data_fixture.create_database_application(workspace=workspace)
 
+    config = ImportExportConfig(include_permission_data=True)
+
     role = Role.objects.get(uid="ADMIN")
     table, _ = TableHandler().create_table(user, database, name="Table")
     RoleAssignmentHandler().assign_role(user, workspace, role, table)
-    serialized_structure = enterprise_structure.export_serialized(workspace, table)
+    serialized_structure = enterprise_structure.export_serialized(
+        workspace, table, config
+    )
 
     new_table, _ = TableHandler().create_table(user, database, name="New table")
-    enterprise_structure.import_serialized(workspace, new_table, serialized_structure)
+    enterprise_structure.import_serialized(
+        workspace, new_table, serialized_structure, config
+    )
 
     role_assignments = RoleAssignmentHandler().get_role_assignments(
         workspace, new_table