Fix Baserow table data sync export import authorized user and source table bug
Parent: 71d3fad143
Commit: af51a86438
12 changed files with 353 additions and 10 deletions
Changed paths:
  backend/src/baserow/contrib/database
  backend/src/baserow/core
  changelog/entries/unreleased/bug
  enterprise/backend/src/baserow_enterprise/data_sync
  enterprise/backend/tests/baserow_enterprise_tests/data_sync
  web-frontend/modules
@@ -533,7 +533,7 @@ class DatabaseApplicationType(ApplicationType):
         # metadata is imported too.
         self._import_extra_metadata(serialized_tables, id_mapping, import_export_config)
 
-        self._import_data_sync(serialized_tables, id_mapping)
+        self._import_data_sync(serialized_tables, id_mapping, import_export_config)
 
         return imported_tables
 
@@ -552,14 +552,16 @@ class DatabaseApplicationType(ApplicationType):
             source_workspace, table, serialized_table, import_export_config
         )
 
-    def _import_data_sync(self, serialized_tables, id_mapping):
+    def _import_data_sync(self, serialized_tables, id_mapping, import_export_config):
         for serialized_table in serialized_tables:
             if not serialized_table.get("data_sync", None):
                 continue
             table = serialized_table["_object"]
             serialized_data_sync = serialized_table["data_sync"]
             data_sync_type = data_sync_type_registry.get(serialized_data_sync["type"])
-            data_sync_type.import_serialized(table, serialized_data_sync, id_mapping)
+            data_sync_type.import_serialized(
+                table, serialized_data_sync, id_mapping, import_export_config
+            )
 
     def _import_table_rows(
         self,
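
Together these two hunks thread the `ImportExportConfig` from the application import down to every data sync type. A condensed sketch of the resulting chain, with real method bodies elided and only the forwarding shown (the registry import path is assumed from the surrounding code):

# Condensed sketch of the forwarding; not the full methods from the diff.
from baserow.contrib.database.data_sync.registries import data_sync_type_registry

def import_serialized_tables(serialized_tables, id_mapping, import_export_config):
    # ...tables, fields and rows are imported first, then the data syncs,
    # with the config forwarded along:
    _import_data_sync(serialized_tables, id_mapping, import_export_config)

def _import_data_sync(serialized_tables, id_mapping, import_export_config):
    for serialized_table in serialized_tables:
        serialized_data_sync = serialized_table.get("data_sync", None)
        if not serialized_data_sync:
            continue
        # Each type can now inspect the config, e.g. its `is_duplicate` flag.
        data_sync_type_registry.get(serialized_data_sync["type"]).import_serialized(
            serialized_table["_object"],
            serialized_data_sync,
            id_mapping,
            import_export_config,
        )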
@@ -246,6 +246,7 @@ class DataSyncHandler:
         progress = ChildProgressBuilder.build(progress_builder, 100)
 
         data_sync_type = data_sync_type_registry.get_by_model(data_sync)
+        data_sync_type.before_sync_table(user, data_sync)
         all_properties = data_sync_type.get_properties(data_sync)
         key_to_property = {p.key: p for p in all_properties}
         progress.increment(by=1)
@@ -9,6 +9,7 @@ from baserow.contrib.database.data_sync.export_serialized import (
 )
 from baserow.contrib.database.data_sync.models import DataSync, DataSyncSyncedProperty
 from baserow.contrib.database.fields.models import Field
+from baserow.core.registries import ImportExportConfig
 from baserow.core.registry import (
     CustomFieldsInstanceMixin,
     CustomFieldsRegistryMixin,
@@ -94,6 +95,14 @@ class DataSyncType(
         :param instance: The related data sync instance.
         """
 
+    def before_sync_table(self, user: AbstractUser, instance: "DataSync"):
+        """
+        A hook that's called right before the table sync starts.
+
+        :param user: The user on whose behalf the table is synced.
+        :param instance: The related data sync instance.
+        """
+
     @abstractmethod
     def get_properties(self, instance: "DataSync") -> List[DataSyncProperty]:
         """
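
The handler hunk above invokes this hook just before `get_properties`, and the default is a no-op so existing types keep working. As a sketch of how a concrete type can use it (a hypothetical `ExampleDataSyncType`, with the abstract methods omitted; the enterprise override further down in this commit is the real use case):

# Hypothetical subclass, shown only to illustrate the hook's contract.
class ExampleDataSyncType(DataSyncType):
    type = "example"

    def before_sync_table(self, user, instance):
        # Runs before any properties or rows are fetched, so state the sync
        # depends on can be repaired here, e.g. falling back to the syncing
        # user when no authorized user is stored.
        if instance.authorized_user is None:
            instance.authorized_user = user
            instance.save()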
@@ -155,7 +164,13 @@ class DataSyncType(
             **type_specific,
         )
 
-    def import_serialized(self, table, serialized_values, id_mapping):
+    def import_serialized(
+        self,
+        table,
+        serialized_values,
+        id_mapping,
+        import_export_config: ImportExportConfig,
+    ):
         """
         Imports the data sync properties and the `allowed_fields`.
         """
@@ -835,7 +835,9 @@ class TableHandler(metaclass=baserow_trace_methods(tracer)):
         database_type = application_type_registry.get_by_model(database)
 
         config = ImportExportConfig(
-            include_permission_data=True, reduce_disk_space_usage=False
+            include_permission_data=True,
+            reduce_disk_space_usage=False,
+            is_duplicate=True,
         )
 
         serialized_tables = database_type.export_tables_serialized([table], config)
@@ -1494,7 +1494,9 @@ class CoreHandler(metaclass=baserow_trace_methods(tracer)):
         progress.increment(by=start_progress)
 
         duplicate_import_export_config = ImportExportConfig(
-            include_permission_data=True, reduce_disk_space_usage=False
+            include_permission_data=True,
+            reduce_disk_space_usage=False,
+            is_duplicate=True,
         )
         # export the application
         specific_application = application.specific
@@ -70,6 +70,7 @@ class ImportExportConfig:
 
     include_permission_data: bool
 
+    reduce_disk_space_usage: bool = False
     """
     Whether or not the import/export should attempt to save disk space by excluding
     certain pieces of optional data or processes that could instead be done later or
@@ -79,13 +80,18 @@ class ImportExportConfig:
     tsvector full text search columns as they can also be lazy loaded after the import
     when the user opens a view.
     """
-    reduce_disk_space_usage: bool = False
 
+    workspace_for_user_references: "Workspace" = None
     """
     Determines an alternative workspace to search for user references
     during imports.
     """
-    workspace_for_user_references: "Workspace" = None
+
+    is_duplicate: bool = False
+    """
+    Indicates whether the import/export operation is duplicating an existing object.
+    The data then doesn't leave the instance.
+    """
 
 
 class Plugin(APIUrlsInstanceMixin, Instance):
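
A short sketch of the new flag in use, combining the construction pattern from the duplicate code paths above with the consumer-side branch that the enterprise data sync type adds further down:

from baserow.core.registries import ImportExportConfig

# Duplicating keeps the data inside the same Baserow instance, so the
# duplicate paths in TableHandler, CoreHandler, and SnapshotHandler now
# mark the config accordingly.
duplicate_config = ImportExportConfig(
    include_permission_data=True,
    reduce_disk_space_usage=False,
    is_duplicate=True,
)

# A consumer can then decide to preserve cross-object references (such as
# a data sync's source table) that would be unsafe to keep when exporting
# to another instance.
if duplicate_config.is_duplicate:
    pass  # e.g. keep the existing source_table_id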
@@ -394,6 +394,7 @@ class SnapshotHandler:
             include_permission_data=True,
             reduce_disk_space_usage=True,
             workspace_for_user_references=workspace,
+            is_duplicate=True,
         )
         try:
             exported_application = application_type.export_serialized(
@@ -454,7 +455,9 @@ class SnapshotHandler:
         application_type = application_type_registry.get_by_model(application)
 
         restore_snapshot_import_export_config = ImportExportConfig(
-            include_permission_data=True, reduce_disk_space_usage=False
+            include_permission_data=True,
+            reduce_disk_space_usage=False,
+            is_duplicate=True,
         )
         # Temporary set the workspace for the application so that the permissions can
         # be correctly set during the import process.
@@ -0,0 +1,7 @@
+{
+    "type": "bug",
+    "message": "Fix Baserow table data sync export import authorized user and source table bug.",
+    "issue_number": null,
+    "bullet_points": [],
+    "created_at": "2024-10-24"
+}
@@ -35,6 +35,7 @@ from baserow.contrib.database.fields.models import (
     TextField,
 )
 from baserow.contrib.database.fields.registries import field_type_registry
+from baserow.contrib.database.fields.utils import get_field_id_from_field_key
 from baserow.contrib.database.rows.operations import ReadDatabaseRowOperationType
 from baserow.contrib.database.table.exceptions import TableDoesNotExist
 from baserow.contrib.database.table.handler import TableHandler
@@ -144,6 +145,15 @@ class LocalBaserowTableDataSyncType(DataSyncType):
             DATA_SYNC, instance.table.database.workspace
         )
 
+    def before_sync_table(self, user, instance):
+        # If the authorized user was deleted, or the table was duplicated,
+        # the authorized user is set to `None`. In this case, we set the
+        # authorized user to the user on whose behalf the table is synced so
+        # that the sync keeps working.
+        if instance.authorized_user is None:
+            instance.authorized_user = user
+            instance.save()
+
     def _get_table(self, instance):
         try:
             table = TableHandler().get_table(instance.source_table_id)
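
A short usage sketch of the fallback this override provides; it mirrors the new `test_sync_data_sync_table_authorized_user_is_none` test below and assumes `user`, `database`, and `source_table` already exist:

from baserow.contrib.database.data_sync.handler import DataSyncHandler

handler = DataSyncHandler()
data_sync = handler.create_data_sync_table(
    user=user,
    database=database,
    table_name="Test",
    type_name="local_baserow_table",
    synced_properties=["id"],
    source_table_id=source_table.id,
    authorized_user=None,  # e.g. the previous authorized user was deleted
)
handler.sync_data_sync_table(user=user, data_sync=data_sync)

data_sync.refresh_from_db()
# The hook repaired the missing authorized user with the syncing user.
assert data_sync.authorized_user_id == user.id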
@@ -201,3 +211,54 @@ class LocalBaserowTableDataSyncType(DataSyncType):
         rows_queryset = model.objects.all().values(*["id"] + enabled_property_field_ids)
         progress.increment(by=9)  # makes the total `10`
         return rows_queryset
+
+    def import_serialized(
+        self, table, serialized_values, id_mapping, import_export_config
+    ):
+        serialized_copy = serialized_values.copy()
+        # Always unset the authorized user for security reasons. This is okay because
+        # the first user to sync the data sync table will become the authorized user.
+        serialized_copy["authorized_user_id"] = None
+        source_table_id = serialized_copy["source_table_id"]
+
+        if source_table_id in id_mapping["database_tables"]:
+            # If the source table exists in the mapping, it means that it was
+            # included in the export. In that case, we want to use that one as the
+            # source table instead of the existing one.
+            serialized_copy["source_table_id"] = id_mapping["database_tables"][
+                source_table_id
+            ]
+            serialized_copy["authorized_user_id"] = None
+            data_sync = super().import_serialized(
+                table, serialized_copy, id_mapping, import_export_config
+            )
+
+            # Because we're now pointing to the newly imported data sync source table,
+            # the field id keys must also be remapped.
+            properties_to_update = []
+            for data_sync_property in data_sync.synced_properties.all():
+                key_field_id = get_field_id_from_field_key(data_sync_property.key)
+                if key_field_id:
+                    new_field_id = id_mapping["database_fields"][key_field_id]
+                    data_sync_property.key = f"field_{new_field_id}"
+                    properties_to_update.append(data_sync_property)
+            DataSyncSyncedProperty.objects.bulk_update(properties_to_update, ["key"])
+
+            return data_sync
+
+        if import_export_config.is_duplicate:
+            # When duplicating the database or table, and it doesn't exist in the
+            # id_mapping, then the source table is inside the same database or in
+            # another workspace. In that case, we want to keep using the same one.
+            return super().import_serialized(
+                table, serialized_copy, id_mapping, import_export_config
+            )
+
+        # If the source table doesn't exist in the mapping, and we're not
+        # duplicating, then it's not possible to preserve the data sync. We'll then
+        # transform the fields to editable fields, keep the data, and keep the table
+        # as a regular table.
+        table.field_set.all().update(
+            read_only=False, immutable_type=False, immutable_properties=False
+        )
+        return None
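
A worked example of the key remapping in the first branch, assuming `get_field_id_from_field_key` parses keys of the form `field_<id>` into the numeric id (returning `None` for keys like `"id"`), and that the export contained source table 10 with field 42, imported as 110 and 142:

from baserow.contrib.database.fields.utils import get_field_id_from_field_key

id_mapping = {
    "database_tables": {10: 110},
    "database_fields": {42: 142},
}

old_key = "field_42"
field_id = get_field_id_from_field_key(old_key)  # assumed to return 42
if field_id:
    new_key = f"field_{id_mapping['database_fields'][field_id]}"
    assert new_key == "field_142"  # the property now points at the new field

# Non-field keys such as "id" are assumed to yield None and stay untouched.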
@@ -1,3 +1,4 @@
+from django.core.exceptions import ObjectDoesNotExist
 from django.test.utils import override_settings
 from django.urls import reverse
 
@@ -14,7 +15,9 @@ from baserow.contrib.database.data_sync.exceptions import SyncError
 from baserow.contrib.database.data_sync.handler import DataSyncHandler
 from baserow.contrib.database.fields.models import NumberField
 from baserow.contrib.database.fields.registries import field_type_registry
+from baserow.contrib.database.table.handler import TableHandler
 from baserow.core.db import specific_iterator
+from baserow.core.registries import ImportExportConfig, application_type_registry
 from baserow.test_utils.helpers import setup_interesting_test_table
 from baserow_enterprise.data_sync.baserow_table_data_sync import (
     BaserowFieldDataSyncProperty,
@@ -235,6 +238,70 @@ def test_sync_data_sync_table(enterprise_data_fixture):
     )
 
 
+@pytest.mark.django_db
+@override_settings(DEBUG=True)
+def test_sync_data_sync_table_authorized_user_is_none(enterprise_data_fixture):
+    enterprise_data_fixture.enable_enterprise()
+
+    user = enterprise_data_fixture.create_user()
+
+    source_table = enterprise_data_fixture.create_database_table(
+        user=user, name="Source"
+    )
+
+    database = enterprise_data_fixture.create_database_application(user=user)
+    handler = DataSyncHandler()
+
+    data_sync = handler.create_data_sync_table(
+        user=user,
+        database=database,
+        table_name="Test",
+        type_name="local_baserow_table",
+        synced_properties=["id"],
+        source_table_id=source_table.id,
+        authorized_user=None,
+    )
+    handler.sync_data_sync_table(user=user, data_sync=data_sync)
+
+    data_sync.refresh_from_db()
+    assert data_sync.authorized_user_id == user.id
+
+
+@pytest.mark.django_db
+@override_settings(DEBUG=True)
+def test_sync_data_sync_table_authorized_user_is_set(enterprise_data_fixture):
+    enterprise_data_fixture.enable_enterprise()
+
+    user = enterprise_data_fixture.create_user()
+    user_2 = enterprise_data_fixture.create_user()
+
+    workspace = enterprise_data_fixture.create_workspace(user=user)
+    enterprise_data_fixture.create_user_workspace(
+        workspace=workspace, user=user_2, order=0
+    )
+
+    database = enterprise_data_fixture.create_database_application(workspace=workspace)
+    source_table = enterprise_data_fixture.create_database_table(
+        database=database, name="Source"
+    )
+
+    handler = DataSyncHandler()
+
+    data_sync = handler.create_data_sync_table(
+        user=user,
+        database=database,
+        table_name="Test",
+        type_name="local_baserow_table",
+        synced_properties=["id"],
+        source_table_id=source_table.id,
+        authorized_user=user,
+    )
+    handler.sync_data_sync_table(user=user_2, data_sync=data_sync)
+
+    data_sync.refresh_from_db()
+    assert data_sync.authorized_user_id == user.id
+
+
 @pytest.mark.django_db
 @override_settings(DEBUG=True)
 def test_sync_data_sync_table_with_interesting_table_as_source(enterprise_data_fixture):
@@ -639,3 +706,177 @@ def test_async_sync_data_sync_table_without_license(
         HTTP_AUTHORIZATION=f"JWT {token}",
     )
     assert response.status_code == HTTP_402_PAYMENT_REQUIRED
+
+
+@pytest.mark.django_db
+@override_settings(DEBUG=True)
+def test_import_export_including_source_table(enterprise_data_fixture):
+    enterprise_data_fixture.enable_enterprise()
+
+    user = enterprise_data_fixture.create_user()
+    workspace = enterprise_data_fixture.create_workspace(user=user)
+    database = enterprise_data_fixture.create_database_application(workspace=workspace)
+    source_table = enterprise_data_fixture.create_database_table(
+        name="Source", database=database
+    )
+    text_field = enterprise_data_fixture.create_text_field(
+        table=source_table, name="Text"
+    )
+
+    handler = DataSyncHandler()
+
+    data_sync = handler.create_data_sync_table(
+        user=user,
+        database=database,
+        table_name="Test",
+        type_name="local_baserow_table",
+        synced_properties=["id", f"field_{text_field.id}"],
+        source_table_id=source_table.id,
+    )
+    properties = data_sync.synced_properties.all().order_by("key")
+    handler.sync_data_sync_table(user=user, data_sync=data_sync)
+
+    database_type = application_type_registry.get("database")
+    config = ImportExportConfig(include_permission_data=True)
+    serialized = database_type.export_serialized(database, config)
+
+    imported_workspace = enterprise_data_fixture.create_workspace()
+    imported_workspace_user = enterprise_data_fixture.create_user_workspace(
+        workspace=imported_workspace, user=user
+    )
+    id_mapping = {}
+
+    imported_database = database_type.import_serialized(
+        imported_workspace,
+        serialized,
+        config,
+        id_mapping,
+        None,
+        None,
+    )
+
+    imported_table = imported_database.table_set.filter(name="Test").first()
+    imported_source_table = imported_database.table_set.filter(name="Source").first()
+    imported_data_sync = imported_table.data_sync.specific
+    imported_text_field = imported_source_table.field_set.all().first()
+
+    assert imported_data_sync.authorized_user_id is None
+
+    assert imported_data_sync.source_table_id == imported_source_table.id
+    fields = imported_data_sync.table.field_set.all().order_by("id")
+    assert fields[0].read_only is True
+    assert fields[1].read_only is True
+
+    imported_properties = imported_data_sync.synced_properties.all().order_by("key")
+    assert imported_properties[0].key != f"field_{text_field.id}"
+    assert imported_properties[0].key == f"field_{imported_text_field.id}"
+    assert imported_properties[1].key == "id"
+
+
+@pytest.mark.django_db
+@override_settings(DEBUG=True)
+def test_import_export_duplicate_table(enterprise_data_fixture):
+    enterprise_data_fixture.enable_enterprise()
+
+    user = enterprise_data_fixture.create_user()
+    workspace = enterprise_data_fixture.create_workspace(user=user)
+    database = enterprise_data_fixture.create_database_application(workspace=workspace)
+    source_table = enterprise_data_fixture.create_database_table(
+        name="Source", database=database
+    )
+    text_field = enterprise_data_fixture.create_text_field(
+        table=source_table, name="Text"
+    )
+
+    handler = DataSyncHandler()
+
+    data_sync = handler.create_data_sync_table(
+        user=user,
+        database=database,
+        table_name="Test",
+        type_name="local_baserow_table",
+        synced_properties=["id", f"field_{text_field.id}"],
+        source_table_id=source_table.id,
+    )
+    handler.sync_data_sync_table(user=user, data_sync=data_sync)
+
+    duplicated_table = TableHandler().duplicate_table(user, data_sync.table)
+    assert duplicated_table.id != data_sync.table_id
+
+    imported_data_sync = duplicated_table.data_sync.specific
+    assert imported_data_sync.source_table_id == data_sync.source_table_id
+    assert imported_data_sync.authorized_user_id is None
+
+    fields = imported_data_sync.table.field_set.all().order_by("id")
+    assert fields[0].read_only is True
+    assert fields[1].read_only is True
+
+    imported_properties = imported_data_sync.synced_properties.all().order_by("key")
+    assert imported_properties[0].key == f"field_{text_field.id}"
+    assert imported_properties[1].key == "id"
+
+
+@pytest.mark.django_db
+@override_settings(DEBUG=True)
+def test_import_export_excluding_source_table(enterprise_data_fixture):
+    enterprise_data_fixture.enable_enterprise()
+
+    user = enterprise_data_fixture.create_user()
+    workspace = enterprise_data_fixture.create_workspace(user=user)
+    workspace_2 = enterprise_data_fixture.create_workspace(user=user)
+    database = enterprise_data_fixture.create_database_application(workspace=workspace)
+    database_2 = enterprise_data_fixture.create_database_application(
+        workspace=workspace_2
+    )
+    source_table = enterprise_data_fixture.create_database_table(
+        name="Source", database=database_2
+    )
+    text_field = enterprise_data_fixture.create_text_field(
+        table=source_table, name="Text"
+    )
+
+    handler = DataSyncHandler()
+
+    data_sync = handler.create_data_sync_table(
+        user=user,
+        database=database,
+        table_name="Test",
+        type_name="local_baserow_table",
+        synced_properties=["id", f"field_{text_field.id}"],
+        source_table_id=source_table.id,
+    )
+    properties = data_sync.synced_properties.all().order_by("key")
+    handler.sync_data_sync_table(user=user, data_sync=data_sync)
+
+    database_type = application_type_registry.get("database")
+    config = ImportExportConfig(include_permission_data=True)
+    serialized = database_type.export_serialized(database, config)
+
+    imported_workspace = enterprise_data_fixture.create_workspace()
+    imported_workspace_user = enterprise_data_fixture.create_user_workspace(
+        workspace=imported_workspace, user=user
+    )
+    id_mapping = {}
+
+    imported_database = database_type.import_serialized(
+        imported_workspace,
+        serialized,
+        config,
+        id_mapping,
+        None,
+        None,
+    )
+
+    imported_table = imported_database.table_set.filter(name="Test").first()
+    with pytest.raises(ObjectDoesNotExist):
+        imported_table.data_sync
+
+    fields = imported_table.field_set.all().order_by("id")
+    assert fields[0].read_only is False
+    assert fields[0].immutable_properties is False
+    assert fields[0].immutable_type is False
+    assert fields[1].read_only is False
+    assert fields[1].immutable_properties is False
+    assert fields[1].immutable_type is False
@@ -2,7 +2,6 @@
   <li
     class="tree__item"
     :class="{
       active: application._.selected,
       'tree__item--loading': application._.loading,
     }"
   >
@@ -20,6 +20,10 @@
       :class="{ active: isTableActive(table) }"
     >
       <a class="tree__sub-link" @click="selectTable(application, table)">
+        <i
+          v-if="table.data_sync"
+          class="context__menu-item-icon iconoir-data-transfer-down"
+        ></i>
         {{ table.name }}
       </a>
     </li>